author    WuKong <rebirthmonkey@gmail.com>    2015-06-30 18:47:29 +0200
committer WuKong <rebirthmonkey@gmail.com>    2015-06-30 18:47:29 +0200
commit    b8c756ecdd7cced1db4300935484e8c83701c82e (patch)
tree      87e51107d82b217ede145de9d9d59e2100725bd7 /keystone-moon/keystone/tests
parent    c304c773bae68fb854ed9eab8fb35c4ef17cf136 (diff)

migrate moon code from github to opnfv

Change-Id: Ice53e368fd1114d56a75271aa9f2e598e3eba604
Signed-off-by: WuKong <rebirthmonkey@gmail.com>
Diffstat (limited to 'keystone-moon/keystone/tests')
-rw-r--r--  keystone-moon/keystone/tests/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/moon/__init__.py  4
-rw-r--r--  keystone-moon/keystone/tests/moon/func/__init__.py  4
-rw-r--r--  keystone-moon/keystone/tests/moon/func/test_func_api_authz.py  129
-rw-r--r--  keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py  1011
-rw-r--r--  keystone-moon/keystone/tests/moon/func/test_func_api_log.py  148
-rw-r--r--  keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py  154
-rw-r--r--  keystone-moon/keystone/tests/moon/unit/__init__.py  4
-rw-r--r--  keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py  1229
-rw-r--r--  keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py  861
-rw-r--r--  keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py  4
-rw-r--r--  keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py  162
-rw-r--r--  keystone-moon/keystone/tests/unit/__init__.py  41
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/core_ldap.py  161
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/core_sql.py  53
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/domain_config/core.py  523
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py  41
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/role/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/role/core.py  130
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/role/test_ldap.py  161
-rw-r--r--  keystone-moon/keystone/tests/unit/backend/role/test_sql.py  40
-rw-r--r--  keystone-moon/keystone/tests/unit/catalog/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/catalog/test_core.py  74
-rw-r--r--  keystone-moon/keystone/tests/unit/common/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_base64utils.py  208
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_connection_pool.py  119
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_injection.py  293
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_json_home.py  91
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_ldap.py  502
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_notifications.py  974
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_pemutils.py  337
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_sql_core.py  52
-rw-r--r--  keystone-moon/keystone/tests/unit/common/test_utils.py  164
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_db2.conf  4
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf  5
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf  41
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf  14
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf  14
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf  9
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf  4
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf  35
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf  4
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_sql.conf  8
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf  17
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/deprecated.conf  8
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf  15
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf  5
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf  14
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf  11
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf  13
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf  5
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf  14
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf  5
-rw-r--r--  keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf  7
-rw-r--r--  keystone-moon/keystone/tests/unit/core.py  660
-rw-r--r--  keystone-moon/keystone/tests/unit/default_catalog.templates  14
-rw-r--r--  keystone-moon/keystone/tests/unit/default_fixtures.py  121
-rw-r--r--  keystone-moon/keystone/tests/unit/fakeldap.py  602
-rw-r--r--  keystone-moon/keystone/tests/unit/federation_fixtures.py  28
-rw-r--r--  keystone-moon/keystone/tests/unit/filtering.py  96
-rw-r--r--  keystone-moon/keystone/tests/unit/identity/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/identity/test_core.py  125
-rw-r--r--  keystone-moon/keystone/tests/unit/identity_mapping.py  23
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/__init__.py  15
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/appserver.py  79
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/cache.py  36
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/database.py  124
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/hacking.py  489
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py  34
-rw-r--r--  keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py  29
-rw-r--r--  keystone-moon/keystone/tests/unit/mapping_fixtures.py  1023
-rw-r--r--  keystone-moon/keystone/tests/unit/rest.py  245
-rw-r--r--  keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml  25
-rw-r--r--  keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml  63
-rw-r--r--  keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py  1129
-rw-r--r--  keystone-moon/keystone/tests/unit/test_auth.py  1328
-rw-r--r--  keystone-moon/keystone/tests/unit/test_auth_plugin.py  220
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend.py  5741
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py  247
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py  37
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_federation_sql.py  46
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py  197
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_kvs.py  172
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_ldap.py  3049
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py  244
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_rules.py  62
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_sql.py  948
-rw-r--r--  keystone-moon/keystone/tests/unit/test_backend_templated.py  127
-rw-r--r--  keystone-moon/keystone/tests/unit/test_cache.py  322
-rw-r--r--  keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py  727
-rw-r--r--  keystone-moon/keystone/tests/unit/test_catalog.py  219
-rw-r--r--  keystone-moon/keystone/tests/unit/test_cert_setup.py  246
-rw-r--r--  keystone-moon/keystone/tests/unit/test_cli.py  252
-rw-r--r--  keystone-moon/keystone/tests/unit/test_config.py  84
-rw-r--r--  keystone-moon/keystone/tests/unit/test_contrib_s3_core.py  55
-rw-r--r--  keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py  57
-rw-r--r--  keystone-moon/keystone/tests/unit/test_driver_hints.py  60
-rw-r--r--  keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py  34
-rw-r--r--  keystone-moon/keystone/tests/unit/test_exception.py  227
-rw-r--r--  keystone-moon/keystone/tests/unit/test_hacking_checks.py  143
-rw-r--r--  keystone-moon/keystone/tests/unit/test_ipv6.py  51
-rw-r--r--  keystone-moon/keystone/tests/unit/test_kvs.py  581
-rw-r--r--  keystone-moon/keystone/tests/unit/test_ldap_livetest.py  229
-rw-r--r--  keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py  208
-rw-r--r--  keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py  122
-rw-r--r--  keystone-moon/keystone/tests/unit/test_middleware.py  119
-rw-r--r--  keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py  59
-rw-r--r--  keystone-moon/keystone/tests/unit/test_policy.py  228
-rw-r--r--  keystone-moon/keystone/tests/unit/test_revoke.py  637
-rw-r--r--  keystone-moon/keystone/tests/unit/test_singular_plural.py  48
-rw-r--r--  keystone-moon/keystone/tests/unit/test_sql_livetest.py  73
-rw-r--r--  keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py  380
-rw-r--r--  keystone-moon/keystone/tests/unit/test_sql_upgrade.py  957
-rw-r--r--  keystone-moon/keystone/tests/unit/test_ssl.py  176
-rw-r--r--  keystone-moon/keystone/tests/unit/test_token_bind.py  198
-rw-r--r--  keystone-moon/keystone/tests/unit/test_token_provider.py  836
-rw-r--r--  keystone-moon/keystone/tests/unit/test_url_middleware.py  53
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v2.py  1500
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v2_controller.py  95
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py  1045
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py  344
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3.py  1283
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_assignment.py  2943
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_auth.py  4494
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_catalog.py  746
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_controller.py  52
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_credential.py  406
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_domain_config.py  210
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py  251
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_federation.py  3296
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_filters.py  452
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_identity.py  584
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_oauth1.py  891
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_os_revoke.py  135
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_policy.py  68
-rw-r--r--  keystone-moon/keystone/tests/unit/test_v3_protection.py  1170
-rw-r--r--  keystone-moon/keystone/tests/unit/test_validation.py  1563
-rw-r--r--  keystone-moon/keystone/tests/unit/test_versions.py  1051
-rw-r--r--  keystone-moon/keystone/tests/unit/test_wsgi.py  427
-rw-r--r--  keystone-moon/keystone/tests/unit/tests/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/tests/test_core.py  62
-rw-r--r--  keystone-moon/keystone/tests/unit/tests/test_utils.py  37
-rw-r--r--  keystone-moon/keystone/tests/unit/token/__init__.py  0
-rw-r--r--  keystone-moon/keystone/tests/unit/token/test_fernet_provider.py  183
-rw-r--r--  keystone-moon/keystone/tests/unit/token/test_provider.py  29
-rw-r--r--  keystone-moon/keystone/tests/unit/token/test_token_data_helper.py  55
-rw-r--r--  keystone-moon/keystone/tests/unit/token/test_token_model.py  262
-rw-r--r--  keystone-moon/keystone/tests/unit/utils.py  89
150 files changed, 56408 insertions, 0 deletions
diff --git a/keystone-moon/keystone/tests/__init__.py b/keystone-moon/keystone/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/__init__.py
diff --git a/keystone-moon/keystone/tests/moon/__init__.py b/keystone-moon/keystone/tests/moon/__init__.py
new file mode 100644
index 00000000..1b678d53
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
diff --git a/keystone-moon/keystone/tests/moon/func/__init__.py b/keystone-moon/keystone/tests/moon/func/__init__.py
new file mode 100644
index 00000000..1b678d53
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/func/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_authz.py b/keystone-moon/keystone/tests/moon/func/test_func_api_authz.py
new file mode 100644
index 00000000..77438e95
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/func/test_func_api_authz.py
@@ -0,0 +1,129 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import unittest
+import json
+import httplib
+
+
+CREDENTIALS = {
+    "host": "127.0.0.1",
+    "port": "35357",
+    "login": "admin",
+    "password": "nomoresecrete",
+    "tenant_name": "demo",
+    "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr",
+    "csrftoken": "",
+    "x-subject-token": ""
+}
+
+
+def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None):
+    # MOON_SERVER_IP["URL"] = url
+    # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP)
+    if post_data:
+        method = "POST"
+    if delete_data:
+        method = "DELETE"
+    print("\033[32m{} {}\033[m".format(method, url))
+    conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"])
+    headers = {
+        "Content-type": "application/x-www-form-urlencoded",
+        # "Accept": "text/plain",
+        "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+        'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]),
+    }
+    if crsftoken:
+        headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"])
+        CREDENTIALS["crsftoken"] = crsftoken
+    if authtoken:
+        headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"]
+    if post_data:
+        method = "POST"
+        headers["Content-type"] = "application/json"
+        if crsftoken:
+            post_data = "&".join(map(lambda x: "=".join(x), post_data))
+        elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS:
+            post_data = json.dumps(post_data)
+            headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(
+                CREDENTIALS["crsftoken"],
+                CREDENTIALS["sessionid"])
+        else:
+            post_data = json.dumps(post_data)
+        # conn.request(method, url, json.dumps(post_data), headers=headers)
+        conn.request(method, url, post_data, headers=headers)
+    elif delete_data:
+        method = "DELETE"
+        conn.request(method, url, json.dumps(delete_data), headers=headers)
+    else:
+        conn.request(method, url, headers=headers)
+    resp = conn.getresponse()
+    headers = resp.getheaders()
+    try:
+        CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"]
+    except KeyError:
+        pass
+    if crsftoken:
+        sessionid_start = dict(headers)["set-cookie"].index("sessionid=") + len("sessionid=")
+        sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start)
+        sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end]
+        CREDENTIALS["sessionid"] = sessionid
+    content = resp.read()
+    conn.close()
+    try:
+        return json.loads(content)
+    except ValueError:
+        return {"content": content}
+
+
+class AuthTest(unittest.TestCase):
+
+    def setUp(self):
+        post = {
+            "auth": {
+                "identity": {
+                    "methods": [
+                        "password"
+                    ],
+                    "password": {
+                        "user": {
+                            "domain": {
+                                "id": "Default"
+                            },
+                            "name": "admin",
+                            "password": "nomoresecrete"
+                        }
+                    }
+                },
+                "scope": {
+                    "project": {
+                        "domain": {
+                            "id": "Default"
+                        },
+                        "name": "demo"
+                    }
+                }
+            }
+        }
+        data = get_url("/v3/auth/tokens", post_data=post)
+        self.assertIn("token", data)
+
+    def tearDown(self):
+        pass
+
+    def test_authz(self):
+        data = get_url("/v3/OS-MOON/authz/1234567890/1111111/2222222/3333333", authtoken=True)
+        for key in ("authz", "subject_id", "tenant_id", "object_id", "action_id"):
+            self.assertIn(key, data)
+        print(data)
+        data = get_url("/v3/OS-MOON/authz/961420e0aeed4fd88e09cf4ae2ae700e/"
+                       "4cff0936eeed42439d746e8071245235/df60c814-bafd-44a8-ad34-6c649e75295f/unpause", authtoken=True)
+        for key in ("authz", "subject_id", "tenant_id", "object_id", "action_id"):
+            self.assertIn(key, data)
+        print(data)
+
+
+if __name__ == "__main__":
+    unittest.main()
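
The flow exercised by test_authz above reduces to two HTTP round-trips: a POST to /v3/auth/tokens, which returns the token in the X-Subject-Token response header, and a GET on the OS-MOON authz route with that token passed as X-Auth-Token. The following is a minimal Python 3 sketch of that flow, not part of the commit: it assumes a Moon-enabled Keystone listening on 127.0.0.1:35357 with the demo credentials hard-coded in CREDENTIALS, and the tenant/subject/object/action order of the path segments is inferred from the test's second request.

import http.client
import json

HOST, PORT = "127.0.0.1", 35357  # test-deployment defaults from CREDENTIALS (assumption)


def get_token(user="admin", password="nomoresecrete", project="demo"):
    # Request a project-scoped token; Keystone v3 returns it in the
    # X-Subject-Token response header rather than in the JSON body.
    body = {"auth": {
        "identity": {"methods": ["password"],
                     "password": {"user": {"domain": {"id": "Default"},
                                           "name": user,
                                           "password": password}}},
        "scope": {"project": {"domain": {"id": "Default"}, "name": project}}}}
    conn = http.client.HTTPConnection(HOST, PORT)
    conn.request("POST", "/v3/auth/tokens", json.dumps(body),
                 headers={"Content-Type": "application/json"})
    resp = conn.getresponse()
    token = resp.getheader("X-Subject-Token")
    resp.read()
    conn.close()
    return token


def authz(token, tenant_id, subject_id, object_id, action_id):
    # Ask the Moon extension for an authorization decision on the
    # (subject, object, action) triple within the given tenant.
    conn = http.client.HTTPConnection(HOST, PORT)
    conn.request("GET",
                 "/v3/OS-MOON/authz/{}/{}/{}/{}".format(
                     tenant_id, subject_id, object_id, action_id),
                 headers={"X-Auth-Token": token})
    data = json.loads(conn.getresponse().read())
    conn.close()
    return data


if __name__ == "__main__":
    token = get_token()
    print(authz(token, "1234567890", "1111111", "2222222", "3333333"))

Apart from httplib having become http.client in Python 3, this is the same request sequence the test drives through its get_url helper.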
diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py b/keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py
new file mode 100644
index 00000000..607691ea
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py
@@ -0,0 +1,1011 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import unittest
+import json
+import httplib
+from uuid import uuid4
+import copy
+
+CREDENTIALS = {
+    "host": "127.0.0.1",
+    "port": "35357",
+    "login": "admin",
+    "password": "nomoresecrete",
+    "tenant_name": "demo",
+    "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr",
+    "csrftoken": "",
+    "x-subject-token": ""
+}
+
+
+def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None):
+    # MOON_SERVER_IP["URL"] = url
+    # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP)
+    if post_data:
+        method = "POST"
+    if delete_data:
+        method = "DELETE"
+    # print("\033[32m{} {}\033[m".format(method, url))
+    conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"])
+    headers = {
+        "Content-type": "application/x-www-form-urlencoded",
+        # "Accept": "text/plain",
+        "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+        'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]),
+    }
+    if crsftoken:
+        headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"])
+        CREDENTIALS["crsftoken"] = crsftoken
+    if authtoken:
+        headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"]
+    if post_data:
+        method = "POST"
+        headers["Content-type"] = "application/json"
+        if crsftoken:
+            post_data = "&".join(map(lambda x: "=".join(x), post_data))
+        elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS:
+            post_data = json.dumps(post_data)
+            headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(
+                CREDENTIALS["crsftoken"],
+                CREDENTIALS["sessionid"])
+        else:
+            post_data = json.dumps(post_data)
+        # conn.request(method, url, json.dumps(post_data), headers=headers)
+        conn.request(method, url, post_data, headers=headers)
+    elif delete_data:
+        method = "DELETE"
+        conn.request(method, url, json.dumps(delete_data), headers=headers)
+    else:
+        conn.request(method, url, headers=headers)
+    resp = conn.getresponse()
+    headers = resp.getheaders()
+    try:
+        CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"]
+    except KeyError:
+        pass
+    if crsftoken:
+        sessionid_start = dict(headers)["set-cookie"].index("sessionid=") + len("sessionid=")
+        sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start)
+        sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end]
+        CREDENTIALS["sessionid"] = sessionid
+    content = resp.read()
+    conn.close()
+    try:
+        return json.loads(content)
+    except ValueError:
+        return {"content": content}
+
+def get_keystone_user(name="demo", intra_extension_uuid=None):
+    users = get_url("/v3/users", authtoken=True)["users"]
+    demo_user_uuid = None
+    for user in users:
+        if user["name"] == name:
+            demo_user_uuid = user["id"]
+            break
+        # if no user matching "name" is found, fall back to the admin user
+        if user["name"] == "admin":
+            demo_user_uuid = user["id"]
+    if intra_extension_uuid:
+        post_data = {"subject_id": demo_user_uuid}
+        get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(
+            intra_extension_uuid), post_data=post_data, authtoken=True)
+    return demo_user_uuid
+
+class IntraExtensionsTest(unittest.TestCase):
+
+    def setUp(self):
+        post = {
+            "auth": {
+                "identity": {
+                    "methods": [
+                        "password"
+                    ],
+                    "password": {
+                        "user": {
+                            "domain": {
+                                "id": "Default"
+                            },
+                            "name": "admin",
+                            "password": "nomoresecrete"
+                        }
+                    }
+                },
+                "scope": {
+                    "project": {
+                        "domain": {
+                            "id": "Default"
+                        },
+                        "name": "demo"
+                    }
+                }
+            }
+        }
+        data = get_url("/v3/auth/tokens", post_data=post)
+        self.assertIn("token", data)
+
+    def tearDown(self):
+        pass
+
+    def test_create_intra_extensions(self):
+        data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+        self.assertIn("intra_extensions", data)
+        data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+        self.assertIn("authz_policies", data)
+        for model in data["authz_policies"]:
+            # Create a new intra_extension
+            new_ie = {
+                "name": "new_intra_extension",
+                "description": "new_intra_extension",
+                "policymodel": model
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+            for key in [u'model', u'id', u'name', u'description']:
+                self.assertIn(key, data)
+            ie_id = data["id"]
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertIn(ie_id, data["intra_extensions"])
+
+            # Get all subjects
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True)
+            self.assertIn("subjects", data)
+            self.assertIs(type(data["subjects"]), dict)
+
+            # Get all objects
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True)
+            self.assertIn("objects", data)
+            self.assertIsInstance(data["objects"], dict)
+
+            # Get all actions
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True)
+            self.assertIn("actions", data)
+            self.assertIsInstance(data["actions"], dict)
+
+            # # get current tenant
+            # data = get_url("/v3/OS-MOON/intra_extensions/{}/tenant".format(ie_id), authtoken=True)
+            # self.assertIn("tenant", data)
+            # self.assertIn(type(data["tenant"]), (str, unicode))
+            #
+            # # set current tenant
+            # tenants = get_url("/v3/projects", authtoken=True)["projects"]
+            # post_data = {"tenant_id": ""}
+            # for tenant in tenants:
+            #     if tenant["name"] == "admin":
+            #         post_data = {"tenant_id": tenant["id"]}
+            #         break
+            # data = get_url("/v3/OS-MOON/intra_extensions/{}/tenant".format(ie_id),
+            #                post_data=post_data,
+            #                authtoken=True)
+            # self.assertIn("tenant", data)
+            # self.assertIn(type(data["tenant"]), (str, unicode))
+            # self.assertEqual(data["tenant"], post_data["tenant_id"])
+            #
+            # # check current tenant
+            # data = get_url("/v3/OS-MOON/intra_extensions/{}/tenant".format(ie_id), authtoken=True)
+            # self.assertIn("tenant", data)
+            # self.assertIn(type(data["tenant"]), (str, unicode))
+            # self.assertEqual(data["tenant"], post_data["tenant_id"])
+
+            # Delete the intra_extension
+            data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertNotIn(ie_id, data["intra_extensions"])
+
+    def test_perimeter_data(self):
+        data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+        self.assertIn("intra_extensions", data)
+        data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+        self.assertIn("authz_policies", data)
+        for model in data["authz_policies"]:
+            # Create a new intra_extension
+            new_ie = {
+                "name": "new_intra_extension",
+                "description": "new_intra_extension",
+                "policymodel": model
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+            for key in [u'model', u'id', u'name', u'description']:
+                self.assertIn(key, data)
+            ie_id = data["id"]
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertIn(ie_id, data["intra_extensions"])
+
+            # Get all subjects
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True)
+            self.assertIn("subjects", data)
+            self.assertIs(type(data["subjects"]), dict)
+            self.assertTrue(len(data["subjects"]) > 0)
+
+            # Add a new subject
+            users = get_url("/v3/users", authtoken=True)["users"]
+            demo_user_uuid = None
+            for user in users:
+                if user["name"] == "demo":
+                    demo_user_uuid = user["id"]
+                    break
+                # if user demo is not present
+                if user["name"] == "admin":
+                    demo_user_uuid = user["id"]
+            post_data = {"subject_id": demo_user_uuid}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), post_data=post_data, authtoken=True)
+            self.assertIn("subject", data)
+            self.assertIs(type(data["subject"]), dict)
+            self.assertEqual(post_data["subject_id"], data["subject"]["uuid"])
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True)
+            self.assertIn("subjects", data)
+            self.assertIsInstance(data["subjects"], dict)
+            self.assertIn(post_data["subject_id"], data["subjects"])
+            # delete the previous subject
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects/{}".format(ie_id, post_data["subject_id"]),
+                           method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True)
+            self.assertIn("subjects", data)
+            self.assertIsInstance(data["subjects"], dict)
+            self.assertNotIn(post_data["subject_id"], data["subjects"])
+
+            # Get all objects
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True)
+            self.assertIn("objects", data)
+            self.assertIs(type(data["objects"]), dict)
+            self.assertTrue(len(data["objects"]) > 0)
+
+            # Add a new object
+            post_data = {"object_id": "my_new_object"}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), post_data=post_data, authtoken=True)
+            self.assertIn("object", data)
+            self.assertIsInstance(data["object"], dict)
+            self.assertEqual(post_data["object_id"], data["object"]["name"])
+            object_id = data["object"]["uuid"]
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True)
+            self.assertIn("objects", data)
+            self.assertIsInstance(data["objects"], dict)
+            self.assertIn(post_data["object_id"], data["objects"].values())
+
+            # delete the previous object
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects/{}".format(ie_id, object_id),
+                           method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True)
+            self.assertIn("objects", data)
+            self.assertIsInstance(data["objects"], dict)
+            self.assertNotIn(post_data["object_id"], data["objects"].values())
+
+            # Get all actions
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True)
+            self.assertIn("actions", data)
+            self.assertIs(type(data["actions"]), dict)
+            self.assertTrue(len(data["actions"]) > 0)
+
+            # Add a new action
+            post_data = {"action_id": "create2"}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), post_data=post_data, authtoken=True)
+            action_id = data["action"]["uuid"]
+            self.assertIn("action", data)
+            self.assertIsInstance(data["action"], dict)
+            self.assertEqual(post_data["action_id"], data["action"]["name"])
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True)
+            self.assertIn("actions", data)
+            self.assertIsInstance(data["actions"], dict)
+            self.assertIn(post_data["action_id"], data["actions"].values())
+
+            # delete the previous action
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions/{}".format(ie_id, action_id),
+                           method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True)
+            self.assertIn("actions", data)
+            self.assertIsInstance(data["actions"], dict)
+            self.assertNotIn(post_data["action_id"], data["actions"])
+
+            # Delete the intra_extension
+            data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertNotIn(ie_id, data["intra_extensions"])
+
+    def test_assignments_data(self):
+        data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+        self.assertIn("intra_extensions", data)
+        data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+        self.assertIn("authz_policies", data)
+        for model in data["authz_policies"]:
+            # Create a new intra_extension
+            new_ie = {
+                "name": "new_intra_extension",
+                "description": "new_intra_extension",
+                "policymodel": model
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+            for key in [u'model', u'id', u'name', u'description']:
+                self.assertIn(key, data)
+            ie_id = data["id"]
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertIn(ie_id, data["intra_extensions"])
+
+            # Get all subject_assignments
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments/{}".format(
+                ie_id, get_keystone_user(intra_extension_uuid=ie_id)), authtoken=True)
+            self.assertIn("subject_category_assignments", data)
+            self.assertIs(type(data["subject_category_assignments"]), dict)
+
+            # Add subject_assignments
+            # get one subject
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True)
+            self.assertIn("subjects", data)
+            self.assertIs(type(data["subjects"]), dict)
+            # subject_id = data["subjects"].keys()[0]
+            subject_id = get_keystone_user()
+            # get one subject category
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True)
+            self.assertIn("subject_categories", data)
+            self.assertIs(type(data["subject_categories"]), dict)
+            subject_category_id = data["subject_categories"].keys()[0]
+            # get all subject category scope
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format(
+                ie_id, subject_category_id), authtoken=True)
+            self.assertIn("subject_category_scope", data)
+            self.assertIs(type(data["subject_category_scope"]), dict)
+            subject_category_scope_id = data["subject_category_scope"][subject_category_id].keys()[0]
+            post_data = {
+                "subject_id": subject_id,
+                "subject_category": subject_category_id,
+                "subject_category_scope": subject_category_scope_id
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments".format(ie_id), post_data=post_data, authtoken=True)
+            self.assertIn("subject_category_assignments", data)
+            self.assertIs(type(data["subject_category_assignments"]), dict)
+ self.assertIn(post_data["subject_category"], data["subject_category_assignments"][subject_id])
+ self.assertIn(post_data["subject_category"], data["subject_category_assignments"][subject_id])
+ self.assertIn(post_data["subject_category_scope"],
+ data["subject_category_assignments"][subject_id][post_data["subject_category"]])
+ # data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True)
+ # self.assertIn("subjects", data)
+ # self.assertIsInstance(data["subjects"], dict)
+ # self.assertIn(post_data["subject_id"], data["subjects"])
+
+ # delete the previous subject assignment
+ get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments/{}/{}/{}".format(
+ ie_id,
+ post_data["subject_id"],
+ post_data["subject_category"],
+ post_data["subject_category_scope"],
+ ),
+ method="DELETE", authtoken=True)
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments/{}".format(
+ ie_id, get_keystone_user()), authtoken=True)
+ self.assertIn("subject_category_assignments", data)
+ self.assertIs(type(data["subject_category_assignments"]), dict)
+ if post_data["subject_category"] in data["subject_category_assignments"][subject_id]:
+ if post_data["subject_category"] in data["subject_category_assignments"][subject_id]:
+ self.assertNotIn(post_data["subject_category_scope"],
+ data["subject_category_assignments"][subject_id][post_data["subject_category"]])
+
+            # Get all object_assignments
+
+            # get one object
+            post_data = {"object_id": "my_new_object"}
+            new_object = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), post_data=post_data, authtoken=True)
+            object_id = new_object["object"]["uuid"]
+
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_assignments/{}".format(
+                ie_id, object_id), authtoken=True)
+            self.assertIn("object_category_assignments", data)
+            self.assertIsInstance(data["object_category_assignments"], dict)
+
+            # Add object_assignments
+            # get one object category
+            post_data = {"object_category_id": uuid4().hex}
+            object_category = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id),
+                                      post_data=post_data,
+                                      authtoken=True)
+            object_category_id = object_category["object_category"]["uuid"]
+            # get all object category scope
+            post_data = {
+                "object_category_id": object_category_id,
+                "object_category_scope_id": uuid4().hex
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            object_category_scope_id = data["object_category_scope"]["uuid"]
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format(
+                ie_id, object_category_id), authtoken=True)
+            self.assertIn("object_category_scope", data)
+            self.assertIs(type(data["object_category_scope"]), dict)
+            post_data = {
+                "object_id": object_id,
+                "object_category": object_category_id,
+                "object_category_scope": object_category_scope_id
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_assignments".format(ie_id), post_data=post_data, authtoken=True)
+            self.assertIn("object_category_assignments", data)
+            self.assertIs(type(data["object_category_assignments"]), dict)
+            self.assertIn(post_data["object_id"], data["object_category_assignments"])
+            self.assertIn(post_data["object_category"], data["object_category_assignments"][post_data["object_id"]])
+            self.assertIn(post_data["object_category_scope"],
+                          data["object_category_assignments"][post_data["object_id"]][post_data["object_category"]])
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True)
+            self.assertIn("objects", data)
+            self.assertIsInstance(data["objects"], dict)
+            self.assertIn(post_data["object_id"], data["objects"])
+            # delete the previous object
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects/{}".format(ie_id, post_data["object_id"]),
+                           method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True)
+            self.assertIn("objects", data)
+            self.assertIsInstance(data["objects"], dict)
+            self.assertNotIn(post_data["object_id"], data["objects"])
+
+            # Get all actions_assignments
+
+            # get one action
+            post_data = {"action_id": "my_new_action"}
+            new_object = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), post_data=post_data, authtoken=True)
+            action_id = new_object["action"]["uuid"]
+
+            post_data = {"action_category_id": uuid4().hex}
+            action_category = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id),
+                                      post_data=post_data,
+                                      authtoken=True)
+            action_category_id = action_category["action_category"]["uuid"]
+
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/action_assignments/{}".format(
+                ie_id, action_id), authtoken=True)
+            self.assertIn("action_category_assignments", data)
+            self.assertIsInstance(data["action_category_assignments"], dict)
+
+            # Add action_assignments
+            # get one action category
+            # data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True)
+            # self.assertIn("action_categories", data)
+            # self.assertIs(type(data["action_categories"]), dict)
+            # action_category_id = data["action_categories"][0]
+            # get all action category scope
+            post_data = {
+                "action_category_id": action_category_id,
+                "action_category_scope_id": uuid4().hex
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            action_category_scope_id = data["action_category_scope"]["uuid"]
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format(
+                ie_id, action_category_id), authtoken=True)
+            self.assertIn("action_category_scope", data)
+            self.assertIs(type(data["action_category_scope"]), dict)
+            # action_category_scope_id = data["action_category_scope"][action_category_id].keys()[0]
+            post_data = {
+                "action_id": action_id,
+                "action_category": action_category_id,
+                "action_category_scope": action_category_scope_id
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/action_assignments".format(ie_id), post_data=post_data, authtoken=True)
+            self.assertIn("action_category_assignments", data)
+            self.assertIs(type(data["action_category_assignments"]), dict)
+            self.assertIn(post_data["action_id"], data["action_category_assignments"])
+            self.assertIn(post_data["action_category"], data["action_category_assignments"][post_data["action_id"]])
+            self.assertIn(post_data["action_category_scope"],
+                          data["action_category_assignments"][post_data["action_id"]][post_data["action_category"]])
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True)
+            self.assertIn("actions", data)
+            self.assertIsInstance(data["actions"], dict)
+            self.assertIn(post_data["action_id"], data["actions"])
+            # delete the previous action
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions/{}".format(ie_id, post_data["action_id"]),
+                           method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True)
+            self.assertIn("actions", data)
+            self.assertIsInstance(data["actions"], dict)
+            self.assertNotIn(post_data["action_id"], data["actions"])
+
+            # Delete the intra_extension
+            get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertNotIn(ie_id, data["intra_extensions"])
+
+    def test_metadata_data(self):
+        data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+        self.assertIn("intra_extensions", data)
+        data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+        self.assertIn("authz_policies", data)
+        for model in data["authz_policies"]:
+            # Create a new intra_extension
+            new_ie = {
+                "name": "new_intra_extension",
+                "description": "new_intra_extension",
+                "policymodel": model
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+            for key in [u'model', u'id', u'name', u'description']:
+                self.assertIn(key, data)
+            ie_id = data["id"]
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertIn(ie_id, data["intra_extensions"])
+
+            # Get all subject_categories
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True)
+            self.assertIn("subject_categories", data)
+            self.assertIs(type(data["subject_categories"]), dict)
+
+            # Add a new subject_category
+            post_data = {"subject_category_id": uuid4().hex}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            self.assertIn("subject_category", data)
+            self.assertIsInstance(data["subject_category"], dict)
+            self.assertEqual(post_data["subject_category_id"], data["subject_category"]["name"])
+            subject_category_id = data["subject_category"]["uuid"]
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True)
+            self.assertIn("subject_categories", data)
+            self.assertIsInstance(data["subject_categories"], dict)
+            self.assertIn(post_data["subject_category_id"], data["subject_categories"].values())
+            # delete the previous subject_category
+            get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories/{}".format(ie_id,
+                                                                                   subject_category_id),
+                    method="DELETE",
+                    authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True)
+            self.assertIn("subject_categories", data)
+            self.assertIsInstance(data["subject_categories"], dict)
+            self.assertNotIn(post_data["subject_category_id"], data["subject_categories"].values())
+
+            # Get all object_categories
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True)
+            self.assertIn("object_categories", data)
+            self.assertIsInstance(data["object_categories"], dict)
+
+            # Add a new object_category
+            post_data = {"object_category_id": uuid4().hex}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            self.assertIn("object_category", data)
+            self.assertIsInstance(data["object_category"], dict)
+            self.assertIn(post_data["object_category_id"], data["object_category"]["name"])
+            object_category_id = data["object_category"]["uuid"]
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True)
+            self.assertIn("object_categories", data)
+            self.assertIsInstance(data["object_categories"], dict)
+            self.assertIn(post_data["object_category_id"], data["object_categories"].values())
+            # delete the previous object_category
+ get_url("/v3/OS-MOON/intra_extensions/{}/object_categories/{}".format(ie_id,
+ object_category_id),
+ method="DELETE",
+ authtoken=True)
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True)
+ self.assertIn("object_categories", data)
+ self.assertIsInstance(data["object_categories"], dict)
+ self.assertNotIn(post_data["object_category_id"], data["object_categories"].values())
+
+ # Get all action_categories
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True)
+ self.assertIn("action_categories", data)
+ self.assertIsInstance(data["action_categories"], dict)
+
+ # Add a new action_category
+ post_data = {"action_category_id": uuid4().hex}
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id),
+ post_data=post_data,
+ authtoken=True)
+ self.assertIn("action_category", data)
+ self.assertIsInstance(data["action_category"], dict)
+ self.assertIn(post_data["action_category_id"], data["action_category"]["name"])
+ action_category_id = data["action_category"]["uuid"]
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True)
+ self.assertIn("action_categories", data)
+ self.assertIsInstance(data["action_categories"], dict)
+ self.assertIn(post_data["action_category_id"], data["action_categories"].values())
+            # delete the previous action_category
+ get_url("/v3/OS-MOON/intra_extensions/{}/action_categories/{}".format(ie_id,
+ action_category_id),
+ method="DELETE",
+ authtoken=True)
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True)
+ self.assertIn("action_categories", data)
+ self.assertIsInstance(data["action_categories"], dict)
+ self.assertNotIn(post_data["action_category_id"], data["action_categories"].values())
+
+ # Delete the intra_extension
+ get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+ data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+ self.assertNotIn(ie_id, data["intra_extensions"])
+
+    def test_scope_data(self):
+        data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+        self.assertIn("intra_extensions", data)
+        data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+        self.assertIn("authz_policies", data)
+        for model in data["authz_policies"]:
+            # Create a new intra_extension
+            new_ie = {
+                "name": "new_intra_extension",
+                "description": "new_intra_extension",
+                "policymodel": model
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+            for key in [u'model', u'id', u'name', u'description']:
+                self.assertIn(key, data)
+            ie_id = data["id"]
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertIn(ie_id, data["intra_extensions"])
+
+            # Get all subject_category_scope
+            categories = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True)
+            for category in categories["subject_categories"]:
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("subject_category_scope", data)
+                self.assertIs(type(data["subject_category_scope"]), dict)
+
+                # Add a new subject_category_scope
+                post_data = {
+                    "subject_category_id": category,
+                    "subject_category_scope_id": uuid4().hex
+                }
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope".format(ie_id),
+                               post_data=post_data,
+                               authtoken=True)
+                self.assertIn("subject_category_scope", data)
+                self.assertIsInstance(data["subject_category_scope"], dict)
+                self.assertEqual(post_data["subject_category_scope_id"], data["subject_category_scope"]["name"])
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("subject_category_scope", data)
+                self.assertIsInstance(data["subject_category_scope"], dict)
+                self.assertIn(post_data["subject_category_id"], data["subject_category_scope"])
+                self.assertIn(post_data["subject_category_scope_id"],
+                              data["subject_category_scope"][category].values())
+                # delete the previous subject_category_scope
+                get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}/{}".format(
+                    ie_id,
+                    post_data["subject_category_id"],
+                    post_data["subject_category_scope_id"]),
+                    method="DELETE",
+                    authtoken=True)
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("subject_category_scope", data)
+                self.assertIsInstance(data["subject_category_scope"], dict)
+                self.assertIn(post_data["subject_category_id"], data["subject_category_scope"])
+                self.assertNotIn(post_data["subject_category_scope_id"],
+                                 data["subject_category_scope"][post_data["subject_category_id"]])
+
+            # Get all object_category_scope
+            # get object_categories
+            categories = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True)
+            for category in categories["object_categories"]:
+                post_data = {
+                    "object_category_id": category,
+                    "object_category_scope_id": uuid4().hex
+                }
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope".format(ie_id),
+                               post_data=post_data,
+                               authtoken=True)
+                self.assertIn("object_category_scope", data)
+                self.assertIsInstance(data["object_category_scope"], dict)
+                self.assertEqual(post_data["object_category_scope_id"], data["object_category_scope"]["name"])
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("object_category_scope", data)
+                self.assertIsInstance(data["object_category_scope"], dict)
+                self.assertIn(post_data["object_category_id"], data["object_category_scope"])
+                self.assertIn(post_data["object_category_scope_id"],
+                              data["object_category_scope"][category].values())
+                # delete the previous object_category_scope
+                get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}/{}".format(
+                    ie_id,
+                    post_data["object_category_id"],
+                    post_data["object_category_scope_id"]),
+                    method="DELETE",
+                    authtoken=True)
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("object_category_scope", data)
+                self.assertIsInstance(data["object_category_scope"], dict)
+                self.assertIn(post_data["object_category_id"], data["object_category_scope"])
+                self.assertNotIn(post_data["object_category_scope_id"],
+                                 data["object_category_scope"][post_data["object_category_id"]])
+
+            # Get all action_category_scope
+            categories = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True)
+            print(categories)
+            for category in categories["action_categories"]:
+                print(category)
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("action_category_scope", data)
+                self.assertIsInstance(data["action_category_scope"], dict)
+
+                # Add a new action_category_scope
+                post_data = {
+                    "action_category_id": category,
+                    "action_category_scope_id": uuid4().hex
+                }
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope".format(ie_id),
+                               post_data=post_data,
+                               authtoken=True)
+                self.assertIn("action_category_scope", data)
+                self.assertIsInstance(data["action_category_scope"], dict)
+                self.assertEqual(post_data["action_category_scope_id"], data["action_category_scope"]["name"])
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("action_category_scope", data)
+                self.assertIsInstance(data["action_category_scope"], dict)
+                self.assertIn(post_data["action_category_id"], data["action_category_scope"])
+                self.assertIn(post_data["action_category_scope_id"],
+                              data["action_category_scope"][category].values())
+                # delete the previous action_category_scope
+                get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}/{}".format(
+                    ie_id,
+                    post_data["action_category_id"],
+                    post_data["action_category_scope_id"]),
+                    method="DELETE",
+                    authtoken=True)
+                data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format(
+                    ie_id, category), authtoken=True)
+                self.assertIn("action_category_scope", data)
+                self.assertIsInstance(data["action_category_scope"], dict)
+                self.assertIn(post_data["action_category_id"], data["action_category_scope"])
+                self.assertNotIn(post_data["action_category_scope_id"],
+                                 data["action_category_scope"][post_data["action_category_id"]])
+
+            # Delete the intra_extension
+            get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertNotIn(ie_id, data["intra_extensions"])
+
+    def test_metarule_data(self):
+        data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+        self.assertIn("intra_extensions", data)
+        data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+        self.assertIn("authz_policies", data)
+        for model in data["authz_policies"]:
+            # Create a new intra_extension
+            new_ie = {
+                "name": "new_intra_extension",
+                "description": "new_intra_extension",
+                "policymodel": model
+            }
+            data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+            for key in [u'model', u'id', u'name', u'description']:
+                self.assertIn(key, data)
+            ie_id = data["id"]
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertIn(ie_id, data["intra_extensions"])
+
+            # Get all aggregation_algorithms
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithms".format(ie_id), authtoken=True)
+            self.assertIn("aggregation_algorithms", data)
+            self.assertIs(type(data["aggregation_algorithms"]), list)
+            aggregation_algorithms = data["aggregation_algorithms"]
+
+            # Get all sub_meta_rule_relations
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule_relations".format(ie_id), authtoken=True)
+            self.assertIn("sub_meta_rule_relations", data)
+            self.assertIs(type(data["sub_meta_rule_relations"]), list)
+            sub_meta_rule_relations = data["sub_meta_rule_relations"]
+
+            # Get current aggregation_algorithm
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id), authtoken=True)
+            self.assertIn("aggregation", data)
+            self.assertIn(type(data["aggregation"]), (str, unicode))
+            aggregation_algorithm = data["aggregation"]
+
+            # Set current aggregation_algorithm
+            post_data = {"aggregation_algorithm": ""}
+            for _algo in aggregation_algorithms:
+                if _algo != aggregation_algorithm:
+                    post_data = {"aggregation_algorithm": _algo}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            self.assertIn("aggregation", data)
+            self.assertIn(type(data["aggregation"]), (str, unicode))
+            self.assertEqual(post_data["aggregation_algorithm"], data["aggregation"])
+            new_aggregation_algorithm = data["aggregation"]
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id), authtoken=True)
+            self.assertIn("aggregation", data)
+            self.assertIn(type(data["aggregation"]), (str, unicode))
+            self.assertEqual(post_data["aggregation_algorithm"], new_aggregation_algorithm)
+            # Get back to the old value
+            post_data = {"aggregation_algorithm": aggregation_algorithm}
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            self.assertIn("aggregation", data)
+            self.assertIn(type(data["aggregation"]), (str, unicode))
+            self.assertEqual(post_data["aggregation_algorithm"], aggregation_algorithm)
+
+            # Get current sub_meta_rule
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule".format(ie_id), authtoken=True)
+            self.assertIn("sub_meta_rules", data)
+            self.assertIs(type(data["sub_meta_rules"]), dict)
+            self.assertGreater(len(data["sub_meta_rules"].keys()), 0)
+            relation = data["sub_meta_rules"].keys()[0]
+            new_relation = ""
+            self.assertIn(relation, sub_meta_rule_relations)
+            sub_meta_rule = data["sub_meta_rules"]
+            post_data = dict()
+            for _relation in sub_meta_rule_relations:
+                if _relation != data["sub_meta_rules"].keys()[0]:
+                    post_data[_relation] = copy.deepcopy(sub_meta_rule[relation])
+                    post_data[_relation]["relation"] = _relation
+                    new_relation = _relation
+                    break
+            # Add a new subject category
+            subject_category = uuid4().hex
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id),
+                           post_data={"subject_category_id": subject_category},
+                           authtoken=True)
+            self.assertIn("subject_category", data)
+            self.assertIsInstance(data["subject_category"], dict)
+            self.assertIn(subject_category, data["subject_category"].values())
+            subject_category_id = data["subject_category"]['uuid']
+            # Add a new object category
+            object_category = uuid4().hex
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id),
+                           post_data={"object_category_id": object_category},
+                           authtoken=True)
+            self.assertIn("object_category", data)
+            self.assertIsInstance(data["object_category"], dict)
+            self.assertIn(object_category, data["object_category"].values())
+            object_category_id = data["object_category"]['uuid']
+            # Add a new action category
+            action_category = uuid4().hex
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id),
+                           post_data={"action_category_id": action_category},
+                           authtoken=True)
+            self.assertIn("action_category", data)
+            self.assertIsInstance(data["action_category"], dict)
+            self.assertIn(action_category, data["action_category"].values())
+            action_category_id = data["action_category"]['uuid']
+            # Modify the post_data to add new categories
+            post_data[new_relation]["subject_categories"].append(subject_category_id)
+            post_data[new_relation]["object_categories"].append(object_category_id)
+            post_data[new_relation]["action_categories"].append(action_category_id)
+            data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule".format(ie_id),
+                           post_data=post_data,
+                           authtoken=True)
+            self.assertIn("sub_meta_rules", data)
+            self.assertIs(type(data["sub_meta_rules"]), dict)
+            self.assertGreater(len(data["sub_meta_rules"].keys()), 0)
+            self.assertEqual(new_relation, data["sub_meta_rules"].keys()[0])
+            self.assertIn(subject_category_id, data["sub_meta_rules"][new_relation]["subject_categories"])
+            self.assertIn(object_category_id, data["sub_meta_rules"][new_relation]["object_categories"])
+            self.assertIn(action_category_id, data["sub_meta_rules"][new_relation]["action_categories"])
+            self.assertEqual(new_relation, data["sub_meta_rules"][new_relation]["relation"])
+
+            # Delete the intra_extension
+            data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+            data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+            self.assertNotIn(ie_id, data["intra_extensions"])
+
+ def test_rules_data(self):
+ data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+ self.assertIn("intra_extensions", data)
+ data = get_url("/v3/OS-MOON/authz_policies", authtoken=True)
+ self.assertIn("authz_policies", data)
+ for model in data["authz_policies"]:
+ # Create a new intra_extension
+ print("=====> {}".format(model))
+ new_ie = {
+ "name": "new_intra_extension",
+ "description": "new_intra_extension",
+ "policymodel": model
+ }
+ data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True)
+ for key in [u'model', u'id', u'name', u'description']:
+ self.assertIn(key, data)
+ ie_id = data["id"]
+ data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+ self.assertIn(ie_id, data["intra_extensions"])
+
+ # Get all sub_meta_rule_relations
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule_relations".format(ie_id), authtoken=True)
+ self.assertIn("sub_meta_rule_relations", data)
+ self.assertIs(type(data["sub_meta_rule_relations"]), list)
+ sub_meta_rule_relations = data["sub_meta_rule_relations"]
+
+ # Get current sub_meta_rule
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule".format(ie_id), authtoken=True)
+ self.assertIn("sub_meta_rules", data)
+ self.assertIs(type(data["sub_meta_rules"]), dict)
+ self.assertGreater(len(data["sub_meta_rules"].keys()), 0)
+ relation = data["sub_meta_rules"].keys()[0]
+ self.assertIn(relation, sub_meta_rule_relations)
+ sub_meta_rule = data["sub_meta_rules"]
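+        # A rule holds one value per category plus a trailing boolean (positive or
+        # negative rule), hence the "+ 1"; the checks below assume the sub-meta-rule
+        # holds a single relation.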
+ sub_meta_rule_length = dict()
+ sub_meta_rule_length[relation] = len(data["sub_meta_rules"][relation]["subject_categories"]) + \
+ len(data["sub_meta_rules"][relation]["object_categories"]) + \
+                                 len(data["sub_meta_rules"][relation]["action_categories"]) + 1
+
+ # Get all rules
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_rules".format(ie_id), authtoken=True)
+ self.assertIn("rules", data)
+ self.assertIs(type(data["rules"]), dict)
+ length = dict()
+ for key in data["rules"]:
+ self.assertIn(key, sub_meta_rule_relations)
+ self.assertGreater(len(data["rules"][key]), 0)
+ self.assertIs(type(data["rules"][key]), list)
+ for sub_rule in data["rules"][key]:
+ self.assertEqual(len(sub_rule), sub_meta_rule_length[key])
+ length[key] = len(data["rules"][key])
+
+ # Get one value of subject category scope
+ # FIXME: a better test would be to add a new value in scope and then add it to a new sub-rule
+        categories = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id),
+                             authtoken=True)["subject_categories"].keys()
+        subject_category = categories[0]
+        data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format(
+            ie_id, subject_category), authtoken=True)
+        self.assertIn("subject_category_scope", data)
+        self.assertIs(type(data["subject_category_scope"]), dict)
+        subject_value = data["subject_category_scope"][subject_category].keys()[0]
+ # Get one value of object category scope
+ # FIXME: a better test would be to add a new value in scope and then add it to a new sub-rule
+        categories = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id),
+                             authtoken=True)["object_categories"].keys()
+        object_category = categories[0]
+        data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format(
+            ie_id, object_category), authtoken=True)
+        self.assertIn("object_category_scope", data)
+        self.assertIs(type(data["object_category_scope"]), dict)
+        object_value = data["object_category_scope"][object_category].keys()[0]
+ # Get one or more values in action category scope
+ _sub_meta_action_value = list()
+ for _sub_meta_cat in sub_meta_rule[relation]["action_categories"]:
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format(
+ ie_id, _sub_meta_cat), authtoken=True)
+ action_value = data["action_category_scope"][_sub_meta_cat].keys()[0]
+ _sub_meta_action_value.append(action_value)
+ _sub_meta_rules = list()
+ _sub_meta_rules.append(subject_value)
+ _sub_meta_rules.extend(_sub_meta_action_value)
+ _sub_meta_rules.append(object_value)
+        # Append True because a sub-rule needs a trailing boolean marking it as a positive or a negative rule
+ _sub_meta_rules.append(True)
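+        # The new rule is attached to "relation_super", which is assumed here to be
+        # the relation used by the loaded policy model.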
+ post_data = {
+ "rule": _sub_meta_rules,
+ "relation": "relation_super"
+ }
+ # Add a new sub-rule
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_rules".format(ie_id),
+ post_data=post_data, authtoken=True)
+ self.assertIn("rules", data)
+ self.assertIs(type(data["rules"]), dict)
+ for key in data["rules"]:
+ self.assertIn(key, sub_meta_rule_relations)
+ self.assertGreater(len(data["rules"][key]), 0)
+ for sub_rule in data["rules"][key]:
+ self.assertEqual(len(sub_rule), sub_meta_rule_length[key])
+ if key == "relation_super":
+ self.assertEqual(len(data["rules"][key]), length[key]+1)
+ else:
+ self.assertEqual(len(data["rules"][key]), length[key])
+
+ # Delete the new sub-rule
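+        # A rule is addressed by its value list serialized as "+"-separated strings.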
+ data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_rules/{rel}/{rule}".format(
+ ie_id,
+ rel=post_data["relation"],
+            rule="+".join(map(str, post_data["rule"]))),
+ method="DELETE", authtoken=True)
+ self.assertIn("rules", data)
+ self.assertIs(type(data["rules"]), dict)
+ for key in data["rules"]:
+ self.assertIn(key, sub_meta_rule_relations)
+ self.assertGreater(len(data["rules"][key]), 0)
+            # After the deletion, every relation ("relation_super" included) is back
+            # to its original rule count.
+            self.assertEqual(len(data["rules"][key]), length[key])
+
+ # Delete the intra_extension
+ data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True)
+ data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True)
+ self.assertNotIn(ie_id, data["intra_extensions"])
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_log.py b/keystone-moon/keystone/tests/moon/func/test_func_api_log.py
new file mode 100644
index 00000000..f081aef1
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/func/test_func_api_log.py
@@ -0,0 +1,148 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import unittest
+import json
+import httplib
+import time
+
+CREDENTIALS = {
+ "host": "127.0.0.1",
+ "port": "35357",
+ "login": "admin",
+ "password": "nomoresecrete",
+ "tenant_name": "demo",
+ "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr",
+ "csrftoken": "",
+ "x-subject-token": ""
+}
+
+
+def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None):
+ # MOON_SERVER_IP["URL"] = url
+ # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP)
+ if post_data:
+ method = "POST"
+ if delete_data:
+ method = "DELETE"
+ # print("\033[32m{} {}\033[m".format(method, url))
+ conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"])
+ headers = {
+ "Content-type": "application/x-www-form-urlencoded",
+ # "Accept": "text/plain",
+ "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+ 'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]),
+ }
+ if crsftoken:
+ headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"])
+ CREDENTIALS["crsftoken"] = crsftoken
+ if authtoken:
+ headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"]
+ if post_data:
+ method = "POST"
+ headers["Content-type"] = "application/json"
+ if crsftoken:
+ post_data = "&".join(map(lambda x: "=".join(x), post_data))
+ elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS:
+ post_data = json.dumps(post_data)
+ headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(
+ CREDENTIALS["crsftoken"],
+ CREDENTIALS["sessionid"])
+ else:
+ post_data = json.dumps(post_data)
+ # conn.request(method, url, json.dumps(post_data), headers=headers)
+ conn.request(method, url, post_data, headers=headers)
+ elif delete_data:
+ method = "DELETE"
+ conn.request(method, url, json.dumps(delete_data), headers=headers)
+ else:
+ conn.request(method, url, headers=headers)
+ resp = conn.getresponse()
+ headers = resp.getheaders()
+ try:
+ CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"]
+ except KeyError:
+ pass
+ if crsftoken:
+ sessionid_start = dict(headers)["set-cookie"].index("sessionid=")+len("sessionid=")
+ sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start)
+ sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end]
+ CREDENTIALS["sessionid"] = sessionid
+ content = resp.read()
+ conn.close()
+ try:
+ return json.loads(content)
+ except ValueError:
+ return {"content": content}
+
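+# Usage sketch (assumes a reachable Keystone endpoint; the token request in
+# setUp() below populates CREDENTIALS["x-subject-token"]):
+#
+#     get_url("/v3/auth/tokens", post_data=post)    # POST, stores the token
+#     get_url("/v3/OS-MOON/logs", authtoken=True)   # authenticated GET
+#
+# post_data forces POST and delete_data forces DELETE; otherwise the explicit
+# `method` argument is used. The response body is JSON-decoded when possible.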
+
+class IntraExtensionsTest(unittest.TestCase):
+
+ TIME_FORMAT = '%Y-%m-%d-%H:%M:%S'
+
+ def setUp(self):
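+        # Password-scoped v3 token request; get_url() keeps the returned
+        # X-Subject-Token for the authtoken=True calls made by the tests.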
+ post = {
+ "auth": {
+ "identity": {
+ "methods": [
+ "password"
+ ],
+ "password": {
+ "user": {
+ "domain": {
+ "id": "Default"
+ },
+ "name": "admin",
+ "password": "nomoresecrete"
+ }
+ }
+ },
+ "scope": {
+ "project": {
+ "domain": {
+ "id": "Default"
+ },
+ "name": "demo"
+ }
+ }
+ }
+ }
+ data = get_url("/v3/auth/tokens", post_data=post)
+ self.assertIn("token", data)
+
+ def tearDown(self):
+ pass
+
+ def test_get_logs(self):
+ all_data = get_url("/v3/OS-MOON/logs", authtoken=True)
+        self.assertIn("logs", all_data)
+        len_all_data = len(all_data["logs"])
+        data_1 = all_data["logs"][len_all_data // 2]
+        time_data_1 = data_1.split(" ")[0]
+        data_2 = all_data["logs"][len_all_data // 2 + 10]
+        time_data_2 = data_2.split(" ")[0]
+ data = get_url("/v3/OS-MOON/logs/filter=authz", authtoken=True)
+ self.assertIn("logs", data)
+ self.assertGreater(len_all_data, len(data["logs"]))
+ data = get_url("/v3/OS-MOON/logs/from={}".format(time_data_1), authtoken=True)
+ self.assertIn("logs", data)
+ self.assertGreater(len_all_data, len(data["logs"]))
+        # for _data in data["logs"]:
+        #     self.assertGreater(time.strptime(_data.split(" ")[0], self.TIME_FORMAT),
+        #                        time.strptime(time_data_1, self.TIME_FORMAT))
+ data = get_url("/v3/OS-MOON/logs/from={},to={}".format(time_data_1, time_data_2), authtoken=True)
+ self.assertIn("logs", data)
+ self.assertGreater(len_all_data, len(data["logs"]))
+ self.assertEqual(10, len(data["logs"]))
+ data = get_url("/v3/OS-MOON/logs/event_number=20", authtoken=True)
+ self.assertIn("logs", data)
+ self.assertGreater(len_all_data, len(data["logs"]))
+ self.assertEqual(20, len(data["logs"]))
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py b/keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py
new file mode 100644
index 00000000..c52e068e
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py
@@ -0,0 +1,154 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import unittest
+import json
+import httplib
+
+CREDENTIALS = {
+ "host": "127.0.0.1",
+ "port": "35357",
+ "login": "admin",
+ "password": "nomoresecrete",
+ "tenant_name": "demo",
+ "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr",
+ "csrftoken": "",
+ "x-subject-token": ""
+}
+
+
+def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None):
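+    # Same HTTP helper as in the other functional tests: post_data implies POST,
+    # delete_data implies DELETE, otherwise the explicit `method` is used.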
+ # MOON_SERVER_IP["URL"] = url
+ # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP)
+ if post_data:
+ method = "POST"
+ if delete_data:
+ method = "DELETE"
+ # print("\033[32m{} {}\033[m".format(method, url))
+ conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"])
+ headers = {
+ "Content-type": "application/x-www-form-urlencoded",
+ # "Accept": "text/plain",
+ "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+ 'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]),
+ }
+ if crsftoken:
+ headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"])
+ CREDENTIALS["crsftoken"] = crsftoken
+ if authtoken:
+ headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"]
+ if post_data:
+ method = "POST"
+ headers["Content-type"] = "application/json"
+ if crsftoken:
+ post_data = "&".join(map(lambda x: "=".join(x), post_data))
+ elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS:
+ post_data = json.dumps(post_data)
+ headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(
+ CREDENTIALS["crsftoken"],
+ CREDENTIALS["sessionid"])
+ else:
+ post_data = json.dumps(post_data)
+ # conn.request(method, url, json.dumps(post_data), headers=headers)
+ conn.request(method, url, post_data, headers=headers)
+ elif delete_data:
+ method = "DELETE"
+ conn.request(method, url, json.dumps(delete_data), headers=headers)
+ else:
+ conn.request(method, url, headers=headers)
+ resp = conn.getresponse()
+ headers = resp.getheaders()
+ try:
+ CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"]
+ except KeyError:
+ pass
+ if crsftoken:
+ sessionid_start = dict(headers)["set-cookie"].index("sessionid=")+len("sessionid=")
+ sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start)
+ sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end]
+ CREDENTIALS["sessionid"] = sessionid
+ content = resp.read()
+ conn.close()
+ try:
+ return json.loads(content)
+ except ValueError:
+ return {"content": content}
+
+
+class MappingsTest(unittest.TestCase):
+
+ def setUp(self):
+ post = {
+ "auth": {
+ "identity": {
+ "methods": [
+ "password"
+ ],
+ "password": {
+ "user": {
+ "domain": {
+ "id": "Default"
+ },
+ "name": "admin",
+ "password": "nomoresecrete"
+ }
+ }
+ },
+ "scope": {
+ "project": {
+ "domain": {
+ "id": "Default"
+ },
+ "name": "demo"
+ }
+ }
+ }
+ }
+ data = get_url("/v3/auth/tokens", post_data=post)
+ self.assertIn("token", data)
+
+ def tearDown(self):
+ pass
+
+ def test_get_tenants(self):
+ data = get_url("/v3/OS-MOON/tenants", authtoken=True)
+ self.assertIn("tenants", data)
+ self.assertIsInstance(data["tenants"], list)
+ print(data)
+
+ def test_add_delete_mapping(self):
+ data = get_url("/v3/projects", authtoken=True)
+ project_id = None
+ for project in data["projects"]:
+ if project["name"] == "demo":
+ project_id = project["id"]
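+        # "intra_extension_uuid1" and "intra_extension_uuid2" are placeholder
+        # values for the authz and admin intra-extension UUIDs.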
+ data = get_url("/v3/OS-MOON/tenant",
+ post_data={
+ "id": project_id,
+ "name": "tenant1",
+ "authz": "intra_extension_uuid1",
+ "admin": "intra_extension_uuid2"
+ },
+ authtoken=True)
+ self.assertIn("tenant", data)
+ self.assertIsInstance(data["tenant"], dict)
+ uuid = data["tenant"]["id"]
+ data = get_url("/v3/OS-MOON/tenants", authtoken=True)
+ self.assertIn("tenants", data)
+ self.assertIsInstance(data["tenants"], list)
+ print(data)
+ data = get_url("/v3/OS-MOON/tenant/{}".format(uuid),
+ method="DELETE",
+ authtoken=True)
+ data = get_url("/v3/OS-MOON/tenants", authtoken=True)
+ self.assertIn("tenants", data)
+ self.assertIsInstance(data["tenants"], list)
+ print(data)
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/keystone-moon/keystone/tests/moon/unit/__init__.py b/keystone-moon/keystone/tests/moon/unit/__init__.py
new file mode 100644
index 00000000..1b678d53
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/unit/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py
new file mode 100644
index 00000000..03ef845c
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py
@@ -0,0 +1,1229 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+"""Unit tests for core IntraExtensionAdminManager"""
+
+import json
+import os
+import uuid
+from oslo_config import cfg
+from keystone.tests import unit as tests
+from keystone.contrib.moon.core import IntraExtensionAdminManager, IntraExtensionAuthzManager
+from keystone.tests.unit.ksfixtures import database
+from keystone import resource
+from keystone.contrib.moon.exception import *
+from keystone.tests.unit import default_fixtures
+from keystone.contrib.moon.core import LogManager, TenantManager
+
+CONF = cfg.CONF
+
+USER_ADMIN = {
+ 'name': 'admin',
+ 'domain_id': "default",
+ 'password': 'admin'
+}
+
+IE = {
+ "name": "test IE",
+ "policymodel": "policy_rbac_authz",
+ "description": "a simple description."
+}
+
+class TestIntraExtensionAdminManager(tests.TestCase):
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(TestIntraExtensionAdminManager, self).setUp()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.manager = IntraExtensionAdminManager()
+
+ def __get_key_from_value(self, value, values_dict):
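+        # Return the key mapped to `value` (first match); in Python 2, filter()
+        # over iteritems() returns a list.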
+ return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0]
+
+ def load_extra_backends(self):
+ return {
+ "moonlog_api": LogManager(),
+ "tenant_api": TenantManager(),
+ # "resource_api": resource.Manager(),
+ }
+
+ def config_overrides(self):
+ super(TestIntraExtensionAdminManager, self).config_overrides()
+ self.policy_directory = '../../../examples/moon/policies'
+ self.config_fixture.config(
+ group='moon',
+ intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
+ self.config_fixture.config(
+ group='moon',
+ policy_directory=self.policy_directory)
+
+ def create_intra_extension(self, policy_model="policy_rbac_admin"):
+ # Create the admin user because IntraExtension needs it
+ self.admin = self.identity_api.create_user(USER_ADMIN)
+ IE["policymodel"] = policy_model
+ self.ref = self.manager.load_intra_extension(IE)
+ self.assertIsInstance(self.ref, dict)
+ self.create_tenant(self.ref["id"])
+
+ def create_tenant(self, authz_uuid):
+ tenant = {
+ "id": uuid.uuid4().hex,
+ "name": "TestAuthzIntraExtensionManager",
+ "enabled": True,
+ "description": "",
+ "domain_id": "default"
+ }
+ project = self.resource_api.create_project(tenant["id"], tenant)
+ mapping = self.tenant_api.set_tenant_dict(project["id"], project["name"], authz_uuid, None)
+ self.assertIsInstance(mapping, dict)
+ self.assertIn("authz", mapping)
+ self.assertEqual(mapping["authz"], authz_uuid)
+ return mapping
+
+ def create_user(self, username="TestAdminIntraExtensionManagerUser"):
+ user = {
+ "id": uuid.uuid4().hex,
+ "name": username,
+ "enabled": True,
+ "description": "",
+ "domain_id": "default"
+ }
+ _user = self.identity_api.create_user(user)
+ return _user
+
+ def delete_admin_intra_extension(self):
+ self.manager.delete_intra_extension(self.ref["id"])
+
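+    # The CRUD tests below share one pattern: read the current dict, replace it
+    # wholesale with set_*_dict, delete the new entry, then re-add it one
+    # element at a time with add_*_dict.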
+ def test_subjects(self):
+ self.create_intra_extension()
+
+ subjects = self.manager.get_subject_dict("admin", self.ref["id"])
+ self.assertIsInstance(subjects, dict)
+ self.assertIn("subjects", subjects)
+ self.assertIn("id", subjects)
+ self.assertIn("intra_extension_uuid", subjects)
+ self.assertEqual(self.ref["id"], subjects["intra_extension_uuid"])
+ self.assertIsInstance(subjects["subjects"], dict)
+
+ new_subject = self.create_user()
+ new_subjects = dict()
+ new_subjects[new_subject["id"]] = new_subject["name"]
+ subjects = self.manager.set_subject_dict("admin", self.ref["id"], new_subjects)
+ self.assertIsInstance(subjects, dict)
+ self.assertIn("subjects", subjects)
+ self.assertIn("id", subjects)
+ self.assertIn("intra_extension_uuid", subjects)
+ self.assertEqual(self.ref["id"], subjects["intra_extension_uuid"])
+ self.assertEqual(subjects["subjects"], new_subjects)
+ self.assertIn(new_subject["id"], subjects["subjects"])
+
+ # Delete the new subject
+ self.manager.del_subject("admin", self.ref["id"], new_subject["id"])
+ subjects = self.manager.get_subject_dict("admin", self.ref["id"])
+ self.assertIsInstance(subjects, dict)
+ self.assertIn("subjects", subjects)
+ self.assertIn("id", subjects)
+ self.assertIn("intra_extension_uuid", subjects)
+ self.assertEqual(self.ref["id"], subjects["intra_extension_uuid"])
+ self.assertNotIn(new_subject["id"], subjects["subjects"])
+
+ # Add a particular subject
+ subjects = self.manager.add_subject_dict("admin", self.ref["id"], new_subject["id"])
+ self.assertIsInstance(subjects, dict)
+ self.assertIn("subject", subjects)
+ self.assertIn("uuid", subjects["subject"])
+ self.assertEqual(new_subject["name"], subjects["subject"]["name"])
+ subjects = self.manager.get_subject_dict("admin", self.ref["id"])
+ self.assertIsInstance(subjects, dict)
+ self.assertIn("subjects", subjects)
+ self.assertIn("id", subjects)
+ self.assertIn("intra_extension_uuid", subjects)
+ self.assertEqual(self.ref["id"], subjects["intra_extension_uuid"])
+ self.assertIn(new_subject["id"], subjects["subjects"])
+
+ def test_objects(self):
+ self.create_intra_extension()
+
+ objects = self.manager.get_object_dict("admin", self.ref["id"])
+ self.assertIsInstance(objects, dict)
+ self.assertIn("objects", objects)
+ self.assertIn("id", objects)
+ self.assertIn("intra_extension_uuid", objects)
+ self.assertEqual(self.ref["id"], objects["intra_extension_uuid"])
+ self.assertIsInstance(objects["objects"], dict)
+
+ new_object = self.create_user()
+ new_objects = dict()
+ new_objects[new_object["id"]] = new_object["name"]
+ objects = self.manager.set_object_dict("admin", self.ref["id"], new_objects)
+ self.assertIsInstance(objects, dict)
+ self.assertIn("objects", objects)
+ self.assertIn("id", objects)
+ self.assertIn("intra_extension_uuid", objects)
+ self.assertEqual(self.ref["id"], objects["intra_extension_uuid"])
+ self.assertEqual(objects["objects"], new_objects)
+ self.assertIn(new_object["id"], objects["objects"])
+
+ # Delete the new object
+ self.manager.del_object("admin", self.ref["id"], new_object["id"])
+ objects = self.manager.get_object_dict("admin", self.ref["id"])
+ self.assertIsInstance(objects, dict)
+ self.assertIn("objects", objects)
+ self.assertIn("id", objects)
+ self.assertIn("intra_extension_uuid", objects)
+ self.assertEqual(self.ref["id"], objects["intra_extension_uuid"])
+ self.assertNotIn(new_object["id"], objects["objects"])
+
+ # Add a particular object
+ objects = self.manager.add_object_dict("admin", self.ref["id"], new_object["name"])
+ self.assertIsInstance(objects, dict)
+ self.assertIn("object", objects)
+ self.assertIn("uuid", objects["object"])
+ self.assertEqual(new_object["name"], objects["object"]["name"])
+ new_object["id"] = objects["object"]["uuid"]
+ objects = self.manager.get_object_dict("admin", self.ref["id"])
+ self.assertIsInstance(objects, dict)
+ self.assertIn("objects", objects)
+ self.assertIn("id", objects)
+ self.assertIn("intra_extension_uuid", objects)
+ self.assertEqual(self.ref["id"], objects["intra_extension_uuid"])
+ self.assertIn(new_object["id"], objects["objects"])
+
+ def test_actions(self):
+ self.create_intra_extension()
+
+ actions = self.manager.get_action_dict("admin", self.ref["id"])
+ self.assertIsInstance(actions, dict)
+ self.assertIn("actions", actions)
+ self.assertIn("id", actions)
+ self.assertIn("intra_extension_uuid", actions)
+ self.assertEqual(self.ref["id"], actions["intra_extension_uuid"])
+ self.assertIsInstance(actions["actions"], dict)
+
+ new_action = self.create_user()
+ new_actions = dict()
+ new_actions[new_action["id"]] = new_action["name"]
+ actions = self.manager.set_action_dict("admin", self.ref["id"], new_actions)
+ self.assertIsInstance(actions, dict)
+ self.assertIn("actions", actions)
+ self.assertIn("id", actions)
+ self.assertIn("intra_extension_uuid", actions)
+ self.assertEqual(self.ref["id"], actions["intra_extension_uuid"])
+ self.assertEqual(actions["actions"], new_actions)
+ self.assertIn(new_action["id"], actions["actions"])
+
+ # Delete the new action
+ self.manager.del_action("admin", self.ref["id"], new_action["id"])
+ actions = self.manager.get_action_dict("admin", self.ref["id"])
+ self.assertIsInstance(actions, dict)
+ self.assertIn("actions", actions)
+ self.assertIn("id", actions)
+ self.assertIn("intra_extension_uuid", actions)
+ self.assertEqual(self.ref["id"], actions["intra_extension_uuid"])
+ self.assertNotIn(new_action["id"], actions["actions"])
+
+ # Add a particular action
+ actions = self.manager.add_action_dict("admin", self.ref["id"], new_action["name"])
+ self.assertIsInstance(actions, dict)
+ self.assertIn("action", actions)
+ self.assertIn("uuid", actions["action"])
+ self.assertEqual(new_action["name"], actions["action"]["name"])
+ new_action["id"] = actions["action"]["uuid"]
+ actions = self.manager.get_action_dict("admin", self.ref["id"])
+ self.assertIsInstance(actions, dict)
+ self.assertIn("actions", actions)
+ self.assertIn("id", actions)
+ self.assertIn("intra_extension_uuid", actions)
+ self.assertEqual(self.ref["id"], actions["intra_extension_uuid"])
+ self.assertIn(new_action["id"], actions["actions"])
+
+ def test_subject_categories(self):
+ self.create_intra_extension()
+
+ subject_categories = self.manager.get_subject_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertIsInstance(subject_categories["subject_categories"], dict)
+
+ new_subject_category = {"id": uuid.uuid4().hex, "name": "subject_category_test"}
+ new_subject_categories = dict()
+ new_subject_categories[new_subject_category["id"]] = new_subject_category["name"]
+ subject_categories = self.manager.set_subject_category_dict("admin", self.ref["id"], new_subject_categories)
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertEqual(subject_categories["subject_categories"], new_subject_categories)
+ self.assertIn(new_subject_category["id"], subject_categories["subject_categories"])
+
+ # Delete the new subject_category
+ self.manager.del_subject_category("admin", self.ref["id"], new_subject_category["id"])
+ subject_categories = self.manager.get_subject_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertNotIn(new_subject_category["id"], subject_categories["subject_categories"])
+
+ # Add a particular subject_category
+ subject_categories = self.manager.add_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ new_subject_category["name"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_category", subject_categories)
+ self.assertIn("uuid", subject_categories["subject_category"])
+ self.assertEqual(new_subject_category["name"], subject_categories["subject_category"]["name"])
+ new_subject_category["id"] = subject_categories["subject_category"]["uuid"]
+ subject_categories = self.manager.get_subject_category_dict(
+ "admin",
+ self.ref["id"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertIn(new_subject_category["id"], subject_categories["subject_categories"])
+
+ def test_object_categories(self):
+ self.create_intra_extension()
+
+ object_categories = self.manager.get_object_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(object_categories, dict)
+ self.assertIn("object_categories", object_categories)
+ self.assertIn("id", object_categories)
+ self.assertIn("intra_extension_uuid", object_categories)
+ self.assertEqual(self.ref["id"], object_categories["intra_extension_uuid"])
+ self.assertIsInstance(object_categories["object_categories"], dict)
+
+ new_object_category = {"id": uuid.uuid4().hex, "name": "object_category_test"}
+ new_object_categories = dict()
+ new_object_categories[new_object_category["id"]] = new_object_category["name"]
+ object_categories = self.manager.set_object_category_dict("admin", self.ref["id"], new_object_categories)
+ self.assertIsInstance(object_categories, dict)
+ self.assertIn("object_categories", object_categories)
+ self.assertIn("id", object_categories)
+ self.assertIn("intra_extension_uuid", object_categories)
+ self.assertEqual(self.ref["id"], object_categories["intra_extension_uuid"])
+ self.assertEqual(object_categories["object_categories"], new_object_categories)
+ self.assertIn(new_object_category["id"], object_categories["object_categories"])
+
+ # Delete the new object_category
+ self.manager.del_object_category("admin", self.ref["id"], new_object_category["id"])
+ object_categories = self.manager.get_object_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(object_categories, dict)
+ self.assertIn("object_categories", object_categories)
+ self.assertIn("id", object_categories)
+ self.assertIn("intra_extension_uuid", object_categories)
+ self.assertEqual(self.ref["id"], object_categories["intra_extension_uuid"])
+ self.assertNotIn(new_object_category["id"], object_categories["object_categories"])
+
+ # Add a particular object_category
+ object_categories = self.manager.add_object_category_dict(
+ "admin",
+ self.ref["id"],
+ new_object_category["name"])
+ self.assertIsInstance(object_categories, dict)
+ self.assertIn("object_category", object_categories)
+ self.assertIn("uuid", object_categories["object_category"])
+ self.assertEqual(new_object_category["name"], object_categories["object_category"]["name"])
+ new_object_category["id"] = object_categories["object_category"]["uuid"]
+ object_categories = self.manager.get_object_category_dict(
+ "admin",
+ self.ref["id"])
+ self.assertIsInstance(object_categories, dict)
+ self.assertIn("object_categories", object_categories)
+ self.assertIn("id", object_categories)
+ self.assertIn("intra_extension_uuid", object_categories)
+ self.assertEqual(self.ref["id"], object_categories["intra_extension_uuid"])
+ self.assertIn(new_object_category["id"], object_categories["object_categories"])
+
+ def test_action_categories(self):
+ self.create_intra_extension()
+
+ action_categories = self.manager.get_action_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(action_categories, dict)
+ self.assertIn("action_categories", action_categories)
+ self.assertIn("id", action_categories)
+ self.assertIn("intra_extension_uuid", action_categories)
+ self.assertEqual(self.ref["id"], action_categories["intra_extension_uuid"])
+ self.assertIsInstance(action_categories["action_categories"], dict)
+
+ new_action_category = {"id": uuid.uuid4().hex, "name": "action_category_test"}
+ new_action_categories = dict()
+ new_action_categories[new_action_category["id"]] = new_action_category["name"]
+ action_categories = self.manager.set_action_category_dict("admin", self.ref["id"], new_action_categories)
+ self.assertIsInstance(action_categories, dict)
+ self.assertIn("action_categories", action_categories)
+ self.assertIn("id", action_categories)
+ self.assertIn("intra_extension_uuid", action_categories)
+ self.assertEqual(self.ref["id"], action_categories["intra_extension_uuid"])
+ self.assertEqual(action_categories["action_categories"], new_action_categories)
+ self.assertIn(new_action_category["id"], action_categories["action_categories"])
+
+ # Delete the new action_category
+ self.manager.del_action_category("admin", self.ref["id"], new_action_category["id"])
+ action_categories = self.manager.get_action_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(action_categories, dict)
+ self.assertIn("action_categories", action_categories)
+ self.assertIn("id", action_categories)
+ self.assertIn("intra_extension_uuid", action_categories)
+ self.assertEqual(self.ref["id"], action_categories["intra_extension_uuid"])
+ self.assertNotIn(new_action_category["id"], action_categories["action_categories"])
+
+ # Add a particular action_category
+ action_categories = self.manager.add_action_category_dict(
+ "admin",
+ self.ref["id"],
+ new_action_category["name"])
+ self.assertIsInstance(action_categories, dict)
+ self.assertIn("action_category", action_categories)
+ self.assertIn("uuid", action_categories["action_category"])
+ self.assertEqual(new_action_category["name"], action_categories["action_category"]["name"])
+ new_action_category["id"] = action_categories["action_category"]["uuid"]
+ action_categories = self.manager.get_action_category_dict(
+ "admin",
+ self.ref["id"])
+ self.assertIsInstance(action_categories, dict)
+ self.assertIn("action_categories", action_categories)
+ self.assertIn("id", action_categories)
+ self.assertIn("intra_extension_uuid", action_categories)
+ self.assertEqual(self.ref["id"], action_categories["intra_extension_uuid"])
+ self.assertIn(new_action_category["id"], action_categories["action_categories"])
+
+ def test_subject_category_scope(self):
+ self.create_intra_extension()
+
+ subject_categories = self.manager.set_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ uuid.uuid4().hex: "admin",
+ uuid.uuid4().hex: "dev",
+ }
+ )
+
+ for subject_category in subject_categories["subject_categories"]:
+ subject_category_scope = self.manager.get_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(subject_category_scope["subject_category_scope"], dict)
+
+ new_subject_category_scope = dict()
+ new_subject_category_scope_uuid = uuid.uuid4().hex
+ new_subject_category_scope[new_subject_category_scope_uuid] = "new_subject_category_scope"
+ subject_category_scope = self.manager.set_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIn(new_subject_category_scope[new_subject_category_scope_uuid],
+ subject_category_scope["subject_category_scope"][subject_category].values())
+
+ # Delete the new subject_category_scope
+ self.manager.del_subject_category_scope(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope_uuid)
+ subject_category_scope = self.manager.get_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertNotIn(new_subject_category_scope_uuid, subject_category_scope["subject_category_scope"])
+
+ # Add a particular subject_category_scope
+ subject_category_scope = self.manager.add_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope[new_subject_category_scope_uuid])
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("uuid", subject_category_scope["subject_category_scope"])
+ self.assertEqual(new_subject_category_scope[new_subject_category_scope_uuid],
+ subject_category_scope["subject_category_scope"]["name"])
+ subject_category_scope = self.manager.get_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertNotIn(new_subject_category_scope_uuid, subject_category_scope["subject_category_scope"])
+
+ def test_object_category_scope(self):
+ self.create_intra_extension()
+
+ object_categories = self.manager.set_object_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ uuid.uuid4().hex: "id",
+ uuid.uuid4().hex: "domain",
+ }
+ )
+
+ for object_category in object_categories["object_categories"]:
+ object_category_scope = self.manager.get_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(object_category_scope["object_category_scope"], dict)
+
+ new_object_category_scope = dict()
+ new_object_category_scope_uuid = uuid.uuid4().hex
+ new_object_category_scope[new_object_category_scope_uuid] = "new_object_category_scope"
+ object_category_scope = self.manager.set_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIn(new_object_category_scope[new_object_category_scope_uuid],
+ object_category_scope["object_category_scope"][object_category].values())
+
+ # Delete the new object_category_scope
+ self.manager.del_object_category_scope(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope_uuid)
+ object_category_scope = self.manager.get_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertNotIn(new_object_category_scope_uuid, object_category_scope["object_category_scope"])
+
+ # Add a particular object_category_scope
+ object_category_scope = self.manager.add_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope[new_object_category_scope_uuid])
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("uuid", object_category_scope["object_category_scope"])
+ self.assertEqual(new_object_category_scope[new_object_category_scope_uuid],
+ object_category_scope["object_category_scope"]["name"])
+ object_category_scope = self.manager.get_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertNotIn(new_object_category_scope_uuid, object_category_scope["object_category_scope"])
+
+ def test_action_category_scope(self):
+ self.create_intra_extension()
+
+ action_categories = self.manager.set_action_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ uuid.uuid4().hex: "compute",
+ uuid.uuid4().hex: "identity",
+ }
+ )
+
+ for action_category in action_categories["action_categories"]:
+ action_category_scope = self.manager.get_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(action_category_scope["action_category_scope"], dict)
+
+ new_action_category_scope = dict()
+ new_action_category_scope_uuid = uuid.uuid4().hex
+ new_action_category_scope[new_action_category_scope_uuid] = "new_action_category_scope"
+ action_category_scope = self.manager.set_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIn(new_action_category_scope[new_action_category_scope_uuid],
+ action_category_scope["action_category_scope"][action_category].values())
+
+ # Delete the new action_category_scope
+ self.manager.del_action_category_scope(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope_uuid)
+ action_category_scope = self.manager.get_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertNotIn(new_action_category_scope_uuid, action_category_scope["action_category_scope"])
+
+ # Add a particular action_category_scope
+ action_category_scope = self.manager.add_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope[new_action_category_scope_uuid])
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("uuid", action_category_scope["action_category_scope"])
+ self.assertEqual(new_action_category_scope[new_action_category_scope_uuid],
+ action_category_scope["action_category_scope"]["name"])
+ action_category_scope = self.manager.get_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertNotIn(new_action_category_scope_uuid, action_category_scope["action_category_scope"])
+
+ def test_subject_category_assignment(self):
+ self.create_intra_extension()
+
+ new_subject = self.create_user()
+ new_subjects = dict()
+ new_subjects[new_subject["id"]] = new_subject["name"]
+ subjects = self.manager.set_subject_dict("admin", self.ref["id"], new_subjects)
+
+ new_subject_category_uuid = uuid.uuid4().hex
+ new_subject_category_value = "role"
+ subject_categories = self.manager.set_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ new_subject_category_uuid: new_subject_category_value
+ }
+ )
+
+ for subject_category in subject_categories["subject_categories"]:
+ subject_category_scope = self.manager.get_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(subject_category_scope["subject_category_scope"], dict)
+
+ new_subject_category_scope = dict()
+ new_subject_category_scope_uuid = uuid.uuid4().hex
+ new_subject_category_scope[new_subject_category_scope_uuid] = "admin"
+ subject_category_scope = self.manager.set_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIn(new_subject_category_scope[new_subject_category_scope_uuid],
+ subject_category_scope["subject_category_scope"][subject_category].values())
+
+ new_subject_category_scope2 = dict()
+ new_subject_category_scope2_uuid = uuid.uuid4().hex
+ new_subject_category_scope2[new_subject_category_scope2_uuid] = "dev"
+ subject_category_scope = self.manager.set_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope2)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIn(new_subject_category_scope2[new_subject_category_scope2_uuid],
+ subject_category_scope["subject_category_scope"][subject_category].values())
+
+ subject_category_assignments = self.manager.get_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"]
+ )
+ self.assertIsInstance(subject_category_assignments, dict)
+ self.assertIn("subject_category_assignments", subject_category_assignments)
+ self.assertIn("id", subject_category_assignments)
+ self.assertIn("intra_extension_uuid", subject_category_assignments)
+ self.assertEqual(self.ref["id"], subject_category_assignments["intra_extension_uuid"])
+ self.assertEqual({}, subject_category_assignments["subject_category_assignments"][new_subject["id"]])
+
+ subject_category_assignments = self.manager.set_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"],
+ {
+ new_subject_category_uuid: [new_subject_category_scope_uuid, new_subject_category_scope2_uuid],
+ }
+ )
+ self.assertIsInstance(subject_category_assignments, dict)
+ self.assertIn("subject_category_assignments", subject_category_assignments)
+ self.assertIn("id", subject_category_assignments)
+ self.assertIn("intra_extension_uuid", subject_category_assignments)
+ self.assertEqual(self.ref["id"], subject_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_subject_category_uuid: [new_subject_category_scope_uuid, new_subject_category_scope2_uuid]},
+ subject_category_assignments["subject_category_assignments"][new_subject["id"]])
+ subject_category_assignments = self.manager.get_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"]
+ )
+ self.assertIsInstance(subject_category_assignments, dict)
+ self.assertIn("subject_category_assignments", subject_category_assignments)
+ self.assertIn("id", subject_category_assignments)
+ self.assertIn("intra_extension_uuid", subject_category_assignments)
+ self.assertEqual(self.ref["id"], subject_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_subject_category_uuid: [new_subject_category_scope_uuid, new_subject_category_scope2_uuid]},
+ subject_category_assignments["subject_category_assignments"][new_subject["id"]])
+
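+        # Drop the first scope from the assignment; only the second one should remain.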
+ self.manager.del_subject_category_assignment(
+ "admin",
+ self.ref["id"],
+ new_subject["id"],
+ new_subject_category_uuid,
+ new_subject_category_scope_uuid
+ )
+ subject_category_assignments = self.manager.get_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"]
+ )
+ self.assertIsInstance(subject_category_assignments, dict)
+ self.assertIn("subject_category_assignments", subject_category_assignments)
+ self.assertIn("id", subject_category_assignments)
+ self.assertIn("intra_extension_uuid", subject_category_assignments)
+ self.assertEqual(self.ref["id"], subject_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_subject_category_uuid: [new_subject_category_scope2_uuid, ]},
+ subject_category_assignments["subject_category_assignments"][new_subject["id"]])
+
+        self.manager.add_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"],
+ new_subject_category_uuid,
+ new_subject_category_scope_uuid
+ )
+
+ subject_category_assignments = self.manager.get_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"]
+ )
+ self.assertIsInstance(subject_category_assignments, dict)
+ self.assertIn("subject_category_assignments", subject_category_assignments)
+ self.assertIn("id", subject_category_assignments)
+ self.assertIn("intra_extension_uuid", subject_category_assignments)
+ self.assertEqual(self.ref["id"], subject_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_subject_category_uuid: [new_subject_category_scope2_uuid, new_subject_category_scope_uuid]},
+ subject_category_assignments["subject_category_assignments"][new_subject["id"]])
+
+ def test_object_category_assignment(self):
+ self.create_intra_extension()
+
+ new_object = self.create_user()
+ new_objects = dict()
+ new_objects[new_object["id"]] = new_object["name"]
+ objects = self.manager.set_object_dict("admin", self.ref["id"], new_objects)
+
+ new_object_category_uuid = uuid.uuid4().hex
+ new_object_category_value = "role"
+ object_categories = self.manager.set_object_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ new_object_category_uuid: new_object_category_value
+ }
+ )
+
+ for object_category in object_categories["object_categories"]:
+ object_category_scope = self.manager.get_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(object_category_scope["object_category_scope"], dict)
+
+ new_object_category_scope = dict()
+ new_object_category_scope_uuid = uuid.uuid4().hex
+ new_object_category_scope[new_object_category_scope_uuid] = "admin"
+ object_category_scope = self.manager.set_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIn(new_object_category_scope[new_object_category_scope_uuid],
+ object_category_scope["object_category_scope"][object_category].values())
+
+ new_object_category_scope2 = dict()
+ new_object_category_scope2_uuid = uuid.uuid4().hex
+ new_object_category_scope2[new_object_category_scope2_uuid] = "dev"
+ object_category_scope = self.manager.set_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope2)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIn(new_object_category_scope2[new_object_category_scope2_uuid],
+ object_category_scope["object_category_scope"][object_category].values())
+
+ object_category_assignments = self.manager.get_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"]
+ )
+ self.assertIsInstance(object_category_assignments, dict)
+ self.assertIn("object_category_assignments", object_category_assignments)
+ self.assertIn("id", object_category_assignments)
+ self.assertIn("intra_extension_uuid", object_category_assignments)
+ self.assertEqual(self.ref["id"], object_category_assignments["intra_extension_uuid"])
+ self.assertEqual({}, object_category_assignments["object_category_assignments"][new_object["id"]])
+
+ object_category_assignments = self.manager.set_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"],
+ {
+ new_object_category_uuid: [new_object_category_scope_uuid, new_object_category_scope2_uuid],
+ }
+ )
+ self.assertIsInstance(object_category_assignments, dict)
+ self.assertIn("object_category_assignments", object_category_assignments)
+ self.assertIn("id", object_category_assignments)
+ self.assertIn("intra_extension_uuid", object_category_assignments)
+ self.assertEqual(self.ref["id"], object_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_object_category_uuid: [new_object_category_scope_uuid, new_object_category_scope2_uuid]},
+ object_category_assignments["object_category_assignments"][new_object["id"]])
+ object_category_assignments = self.manager.get_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"]
+ )
+ self.assertIsInstance(object_category_assignments, dict)
+ self.assertIn("object_category_assignments", object_category_assignments)
+ self.assertIn("id", object_category_assignments)
+ self.assertIn("intra_extension_uuid", object_category_assignments)
+ self.assertEqual(self.ref["id"], object_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_object_category_uuid: [new_object_category_scope_uuid, new_object_category_scope2_uuid]},
+ object_category_assignments["object_category_assignments"][new_object["id"]])
+
+ self.manager.del_object_category_assignment(
+ "admin",
+ self.ref["id"],
+ new_object["id"],
+ new_object_category_uuid,
+ new_object_category_scope_uuid
+ )
+ object_category_assignments = self.manager.get_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"]
+ )
+ self.assertIsInstance(object_category_assignments, dict)
+ self.assertIn("object_category_assignments", object_category_assignments)
+ self.assertIn("id", object_category_assignments)
+ self.assertIn("intra_extension_uuid", object_category_assignments)
+ self.assertEqual(self.ref["id"], object_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_object_category_uuid: [new_object_category_scope2_uuid, ]},
+ object_category_assignments["object_category_assignments"][new_object["id"]])
+
+ self.manager.add_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"],
+ new_object_category_uuid,
+ new_object_category_scope_uuid
+ )
+
+ object_category_assignments = self.manager.get_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"]
+ )
+ self.assertIsInstance(object_category_assignments, dict)
+ self.assertIn("object_category_assignments", object_category_assignments)
+ self.assertIn("id", object_category_assignments)
+ self.assertIn("intra_extension_uuid", object_category_assignments)
+ self.assertEqual(self.ref["id"], object_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_object_category_uuid: [new_object_category_scope2_uuid, new_object_category_scope_uuid]},
+ object_category_assignments["object_category_assignments"][new_object["id"]])
+
+ def test_action_category_assignment(self):
+ self.create_intra_extension()
+
+ new_action = self.create_user()
+ new_actions = dict()
+ new_actions[new_action["id"]] = new_action["name"]
+ actions = self.manager.set_action_dict("admin", self.ref["id"], new_actions)
+
+ new_action_category_uuid = uuid.uuid4().hex
+ new_action_category_value = "role"
+ action_categories = self.manager.set_action_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ new_action_category_uuid: new_action_category_value
+ }
+ )
+
+ for action_category in action_categories["action_categories"]:
+ action_category_scope = self.manager.get_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(action_category_scope["action_category_scope"], dict)
+
+ new_action_category_scope = dict()
+ new_action_category_scope_uuid = uuid.uuid4().hex
+ new_action_category_scope[new_action_category_scope_uuid] = "admin"
+ action_category_scope = self.manager.set_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIn(new_action_category_scope[new_action_category_scope_uuid],
+ action_category_scope["action_category_scope"][action_category].values())
+
+ new_action_category_scope2 = dict()
+ new_action_category_scope2_uuid = uuid.uuid4().hex
+ new_action_category_scope2[new_action_category_scope2_uuid] = "dev"
+ action_category_scope = self.manager.set_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope2)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIn(new_action_category_scope2[new_action_category_scope2_uuid],
+ action_category_scope["action_category_scope"][action_category].values())
+
+ action_category_assignments = self.manager.get_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"]
+ )
+ self.assertIsInstance(action_category_assignments, dict)
+ self.assertIn("action_category_assignments", action_category_assignments)
+ self.assertIn("id", action_category_assignments)
+ self.assertIn("intra_extension_uuid", action_category_assignments)
+ self.assertEqual(self.ref["id"], action_category_assignments["intra_extension_uuid"])
+ self.assertEqual({}, action_category_assignments["action_category_assignments"][new_action["id"]])
+
+ action_category_assignments = self.manager.set_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"],
+ {
+ new_action_category_uuid: [new_action_category_scope_uuid, new_action_category_scope2_uuid],
+ }
+ )
+ self.assertIsInstance(action_category_assignments, dict)
+ self.assertIn("action_category_assignments", action_category_assignments)
+ self.assertIn("id", action_category_assignments)
+ self.assertIn("intra_extension_uuid", action_category_assignments)
+ self.assertEqual(self.ref["id"], action_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_action_category_uuid: [new_action_category_scope_uuid, new_action_category_scope2_uuid]},
+ action_category_assignments["action_category_assignments"][new_action["id"]])
+ action_category_assignments = self.manager.get_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"]
+ )
+ self.assertIsInstance(action_category_assignments, dict)
+ self.assertIn("action_category_assignments", action_category_assignments)
+ self.assertIn("id", action_category_assignments)
+ self.assertIn("intra_extension_uuid", action_category_assignments)
+ self.assertEqual(self.ref["id"], action_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_action_category_uuid: [new_action_category_scope_uuid, new_action_category_scope2_uuid]},
+ action_category_assignments["action_category_assignments"][new_action["id"]])
+
+ self.manager.del_action_category_assignment(
+ "admin",
+ self.ref["id"],
+ new_action["id"],
+ new_action_category_uuid,
+ new_action_category_scope_uuid
+ )
+ action_category_assignments = self.manager.get_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"]
+ )
+ self.assertIsInstance(action_category_assignments, dict)
+ self.assertIn("action_category_assignments", action_category_assignments)
+ self.assertIn("id", action_category_assignments)
+ self.assertIn("intra_extension_uuid", action_category_assignments)
+ self.assertEqual(self.ref["id"], action_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_action_category_uuid: [new_action_category_scope2_uuid, ]},
+ action_category_assignments["action_category_assignments"][new_action["id"]])
+
+ self.manager.add_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"],
+ new_action_category_uuid,
+ new_action_category_scope_uuid
+ )
+
+ action_category_assignments = self.manager.get_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"]
+ )
+ self.assertIsInstance(action_category_assignments, dict)
+ self.assertIn("action_category_assignments", action_category_assignments)
+ self.assertIn("id", action_category_assignments)
+ self.assertIn("intra_extension_uuid", action_category_assignments)
+ self.assertEqual(self.ref["id"], action_category_assignments["intra_extension_uuid"])
+ self.assertEqual(
+ {new_action_category_uuid: [new_action_category_scope2_uuid, new_action_category_scope_uuid]},
+ action_category_assignments["action_category_assignments"][new_action["id"]])
+
+ def test_sub_meta_rules(self):
+ self.create_intra_extension()
+
+ aggregation_algorithms = self.manager.get_aggregation_algorithms("admin", self.ref["id"])
+ self.assertIsInstance(aggregation_algorithms, dict)
+ self.assertIsInstance(aggregation_algorithms["aggregation_algorithms"], list)
+ self.assertIn("and_true_aggregation", aggregation_algorithms["aggregation_algorithms"])
+ self.assertIn("test_aggregation", aggregation_algorithms["aggregation_algorithms"])
+
+ aggregation_algorithm = self.manager.get_aggregation_algorithm("admin", self.ref["id"])
+ self.assertIsInstance(aggregation_algorithm, dict)
+ self.assertIn("aggregation", aggregation_algorithm)
+ self.assertIn(aggregation_algorithm["aggregation"], aggregation_algorithms["aggregation_algorithms"])
+
+ _aggregation_algorithm = list(aggregation_algorithms["aggregation_algorithms"])
+ _aggregation_algorithm.remove(aggregation_algorithm["aggregation"])
+ aggregation_algorithm = self.manager.set_aggregation_algorithm("admin", self.ref["id"], _aggregation_algorithm[0])
+ self.assertIsInstance(aggregation_algorithm, dict)
+ self.assertIn("aggregation", aggregation_algorithm)
+ self.assertIn(aggregation_algorithm["aggregation"], aggregation_algorithms["aggregation_algorithms"])
+
+ sub_meta_rules = self.manager.get_sub_meta_rule("admin", self.ref["id"])
+ self.assertIsInstance(sub_meta_rules, dict)
+ self.assertIn("sub_meta_rules", sub_meta_rules)
+        with open(os.path.join(self.policy_directory, self.ref["model"], "metarule.json")) as metarule_file:
+            sub_meta_rules_conf = json.load(metarule_file)
+ metarule = dict()
+ categories = {
+ "subject_categories": self.manager.get_subject_category_dict("admin", self.ref["id"]),
+ "object_categories": self.manager.get_object_category_dict("admin", self.ref["id"]),
+ "action_categories": self.manager.get_action_category_dict("admin", self.ref["id"])
+ }
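+        # Rebuild the expected metarule from the policy's metarule.json, mapping each category name back to its UUID.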
+ for relation in sub_meta_rules_conf["sub_meta_rules"]:
+ metarule[relation] = dict()
+ for item in ("subject_categories", "object_categories", "action_categories"):
+ metarule[relation][item] = list()
+ for element in sub_meta_rules_conf["sub_meta_rules"][relation][item]:
+ metarule[relation][item].append(self.__get_key_from_value(
+ element,
+ categories[item][item]
+ ))
+
+ for relation in sub_meta_rules["sub_meta_rules"]:
+ self.assertIn(relation, metarule)
+ for item in ("subject_categories", "object_categories", "action_categories"):
+ self.assertEqual(
+ sub_meta_rules["sub_meta_rules"][relation][item],
+ metarule[relation][item]
+ )
+
+ new_subject_category = {"id": uuid.uuid4().hex, "name": "subject_category_test"}
+ # Add a particular subject_category
+ data = self.manager.add_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ new_subject_category["name"])
+ new_subject_category["id"] = data["subject_category"]["uuid"]
+ subject_categories = self.manager.get_subject_category_dict(
+ "admin",
+ self.ref["id"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertIn(new_subject_category["id"], subject_categories["subject_categories"])
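+        # `relation` still holds the last key from the loop above; extend that relation with the new subject category.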
+ metarule[relation]["subject_categories"].append(new_subject_category["id"])
+ _sub_meta_rules = self.manager.set_sub_meta_rule("admin", self.ref["id"], metarule)
+ self.assertIn(relation, metarule)
+ for item in ("subject_categories", "object_categories", "action_categories"):
+ self.assertEqual(
+ _sub_meta_rules["sub_meta_rules"][relation][item],
+ metarule[relation][item]
+ )
+
+ def test_sub_rules(self):
+ self.create_intra_extension()
+
+ sub_meta_rules = self.manager.get_sub_meta_rule("admin", self.ref["id"])
+ self.assertIsInstance(sub_meta_rules, dict)
+ self.assertIn("sub_meta_rules", sub_meta_rules)
+
+ sub_rules = self.manager.get_sub_rules("admin", self.ref["id"])
+ self.assertIsInstance(sub_rules, dict)
+ self.assertIn("rules", sub_rules)
+ rules = dict()
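+        # Each stored rule is a flat list of scope values ordered subject, action, object categories, ending with a boolean decision flag; pop and check membership in that order.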
+ for relation in sub_rules["rules"]:
+ self.assertIn(relation, self.manager.get_sub_meta_rule_relations("admin", self.ref["id"])["sub_meta_rule_relations"])
+ rules[relation] = list()
+ for rule in sub_rules["rules"][relation]:
+ for cat, cat_func, func_name in (
+ ("subject_categories", self.manager.get_subject_category_scope_dict, "subject_category_scope"),
+ ("action_categories", self.manager.get_action_category_scope_dict, "action_category_scope"),
+ ("object_categories", self.manager.get_object_category_scope_dict, "object_category_scope"),
+ ):
+ for cat_value in sub_meta_rules["sub_meta_rules"][relation][cat]:
+ scope = cat_func(
+ "admin",
+ self.ref["id"],
+ cat_value
+ )
+ a_scope = rule.pop(0)
+                    if not isinstance(a_scope, bool):
+ self.assertIn(a_scope, scope[func_name][cat_value])
+
+        # Add a new sub-rule: one scope value from each category, terminated by the boolean decision flag.
+
+ relation = sub_rules["rules"].keys()[0]
+ sub_rule = []
+ for cat, cat_func, func_name in (
+ ("subject_categories", self.manager.get_subject_category_scope_dict, "subject_category_scope"),
+ ("action_categories", self.manager.get_action_category_scope_dict, "action_category_scope"),
+ ("object_categories", self.manager.get_object_category_scope_dict, "object_category_scope"),
+ ):
+ for cat_value in sub_meta_rules["sub_meta_rules"][relation][cat]:
+ scope = cat_func(
+ "admin",
+ self.ref["id"],
+ cat_value
+ )
+ sub_rule.append(scope[func_name][cat_value].keys()[0])
+
+ sub_rule.append(True)
+ sub_rules = self.manager.set_sub_rule("admin", self.ref["id"], relation, sub_rule)
+ self.assertIsInstance(sub_rules, dict)
+ self.assertIn("rules", sub_rules)
+ rules = dict()
+ self.assertIn(sub_rule, sub_rules["rules"][relation])
+ for relation in sub_rules["rules"]:
+ self.assertIn(relation, self.manager.get_sub_meta_rule_relations("admin", self.ref["id"])["sub_meta_rule_relations"])
+ rules[relation] = list()
+ for rule in sub_rules["rules"][relation]:
+ for cat, cat_func, func_name in (
+ ("subject_categories", self.manager.get_subject_category_scope_dict, "subject_category_scope"),
+ ("action_categories", self.manager.get_action_category_scope_dict, "action_category_scope"),
+ ("object_categories", self.manager.get_object_category_scope_dict, "object_category_scope"),
+ ):
+ for cat_value in sub_meta_rules["sub_meta_rules"][relation][cat]:
+ scope = cat_func(
+ "admin",
+ self.ref["id"],
+ cat_value
+ )
+ a_scope = rule.pop(0)
+ self.assertIn(a_scope, scope[func_name][cat_value])
+
+
+
+
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py
new file mode 100644
index 00000000..d08ecf39
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py
@@ -0,0 +1,861 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+"""Unit tests for core IntraExtensionAuthzManager"""
+
+import json
+import os
+import uuid
+from oslo_config import cfg
+from keystone.tests import unit as tests
+from keystone.contrib.moon.core import IntraExtensionAdminManager, IntraExtensionAuthzManager
+from keystone.tests.unit.ksfixtures import database
+from keystone import resource
+from keystone.contrib.moon.exception import *
+from keystone.tests.unit import default_fixtures
+from keystone.contrib.moon.core import LogManager, TenantManager
+
+CONF = cfg.CONF
+
+USER_ADMIN = {
+ 'name': 'admin',
+ 'domain_id': "default",
+ 'password': 'admin'
+}
+
+IE = {
+ "name": "test IE",
+ "policymodel": "policy_rbac_authz",
+ "description": "a simple description."
+}
+
+class TestIntraExtensionAuthzManagerAuthz(tests.TestCase):
+
+ def setUp(self):
+ self.useFixture(database.Database())
+        super(TestIntraExtensionAuthzManagerAuthz, self).setUp()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.manager = IntraExtensionAuthzManager()
+ self.admin_manager = IntraExtensionAdminManager()
+
+ def __get_key_from_value(self, value, values_dict):
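+        # Reverse lookup: return the first key in values_dict whose value equals `value` (Python 2 filter/iteritems idiom).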
+ return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0]
+
+ def load_extra_backends(self):
+ return {
+ "moonlog_api": LogManager(),
+ "tenant_api": TenantManager(),
+ # "resource_api": resource.Manager(),
+ }
+
+ def config_overrides(self):
+        super(TestIntraExtensionAuthzManagerAuthz, self).config_overrides()
+ self.policy_directory = '../../../examples/moon/policies'
+ self.config_fixture.config(
+ group='moon',
+ intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
+ self.config_fixture.config(
+ group='moon',
+ policy_directory=self.policy_directory)
+
+
+class TestIntraExtensionAuthzManager(tests.TestCase):
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(TestIntraExtensionAuthzManager, self).setUp()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.manager = IntraExtensionAuthzManager()
+ self.admin_manager = IntraExtensionAdminManager()
+
+ def __get_key_from_value(self, value, values_dict):
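+        # Reverse lookup helper, as above: map a category name back to its UUID.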
+ return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0]
+
+ def load_extra_backends(self):
+ return {
+ "moonlog_api": LogManager(),
+ "tenant_api": TenantManager(),
+ # "resource_api": resource.Manager(),
+ }
+
+ def config_overrides(self):
+ super(TestIntraExtensionAuthzManager, self).config_overrides()
+ self.policy_directory = '../../../examples/moon/policies'
+ self.config_fixture.config(
+ group='moon',
+ intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
+ self.config_fixture.config(
+ group='moon',
+ policy_directory=self.policy_directory)
+
+ def create_intra_extension(self, policy_model="policy_rbac_authz"):
+ # Create the admin user because IntraExtension needs it
+ self.admin = self.identity_api.create_user(USER_ADMIN)
+ IE["policymodel"] = policy_model
+ self.ref = self.admin_manager.load_intra_extension(IE)
+ self.assertIsInstance(self.ref, dict)
+ self.create_tenant(self.ref["id"])
+
+ def create_tenant(self, authz_uuid):
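+        # Create a Keystone project and map it to the given authz IntraExtension through the TenantManager.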
+ tenant = {
+ "id": uuid.uuid4().hex,
+ "name": "TestIntraExtensionAuthzManager",
+ "enabled": True,
+ "description": "",
+ "domain_id": "default"
+ }
+ project = self.resource_api.create_project(tenant["id"], tenant)
+ mapping = self.tenant_api.set_tenant_dict(project["id"], project["name"], authz_uuid, None)
+ self.assertIsInstance(mapping, dict)
+ self.assertIn("authz", mapping)
+ self.assertEqual(mapping["authz"], authz_uuid)
+ return mapping
+
+ def create_user(self, username="TestIntraExtensionAuthzManagerUser"):
+ user = {
+ "id": uuid.uuid4().hex,
+ "name": username,
+ "enabled": True,
+ "description": "",
+ "domain_id": "default"
+ }
+ _user = self.identity_api.create_user(user)
+ return _user
+
+ def delete_admin_intra_extension(self):
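+        # Deleting through the authz manager must be refused; only the admin manager may modify an IntraExtension.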
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.delete_intra_extension,
+ self.ref["id"])
+
+ def test_subjects(self):
+ self.create_intra_extension()
+
+ subjects = self.manager.get_subject_dict("admin", self.ref["id"])
+ self.assertIsInstance(subjects, dict)
+ self.assertIn("subjects", subjects)
+ self.assertIn("id", subjects)
+ self.assertIn("intra_extension_uuid", subjects)
+ self.assertEqual(self.ref["id"], subjects["intra_extension_uuid"])
+ self.assertIsInstance(subjects["subjects"], dict)
+
+ new_subject = self.create_user()
+ new_subjects = dict()
+ new_subjects[new_subject["id"]] = new_subject["name"]
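+        # The authz manager is read-only: every mutation attempted below must raise AuthIntraExtensionModificationNotAuthorized.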
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_subject_dict,
+ "admin", self.ref["id"], new_subjects)
+
+ # Delete the new subject
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_subject,
+ "admin", self.ref["id"], new_subject["id"])
+
+ # Add a particular subject
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_subject_dict,
+ "admin", self.ref["id"], new_subject["id"])
+
+ def test_objects(self):
+ self.create_intra_extension()
+
+ objects = self.manager.get_object_dict("admin", self.ref["id"])
+ self.assertIsInstance(objects, dict)
+ self.assertIn("objects", objects)
+ self.assertIn("id", objects)
+ self.assertIn("intra_extension_uuid", objects)
+ self.assertEqual(self.ref["id"], objects["intra_extension_uuid"])
+ self.assertIsInstance(objects["objects"], dict)
+
+ new_object = self.create_user()
+ new_objects = dict()
+ new_objects[new_object["id"]] = new_object["name"]
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_object_dict,
+ "admin", self.ref["id"], new_object["id"])
+
+ # Delete the new object
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_object,
+ "admin", self.ref["id"], new_object["id"])
+
+ # Add a particular object
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_object_dict,
+ "admin", self.ref["id"], new_object["name"])
+
+ def test_actions(self):
+ self.create_intra_extension()
+
+ actions = self.manager.get_action_dict("admin", self.ref["id"])
+ self.assertIsInstance(actions, dict)
+ self.assertIn("actions", actions)
+ self.assertIn("id", actions)
+ self.assertIn("intra_extension_uuid", actions)
+ self.assertEqual(self.ref["id"], actions["intra_extension_uuid"])
+ self.assertIsInstance(actions["actions"], dict)
+
+ new_action = self.create_user()
+ new_actions = dict()
+ new_actions[new_action["id"]] = new_action["name"]
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_action_dict,
+ "admin", self.ref["id"], new_actions)
+
+ # Delete the new action
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_action,
+ "admin", self.ref["id"], new_action["id"])
+
+ # Add a particular action
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_action_dict,
+ "admin", self.ref["id"], new_action["id"])
+
+ def test_subject_categories(self):
+ self.create_intra_extension()
+
+ subject_categories = self.manager.get_subject_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertIsInstance(subject_categories["subject_categories"], dict)
+
+ new_subject_category = {"id": uuid.uuid4().hex, "name": "subject_category_test"}
+ new_subject_categories = dict()
+ new_subject_categories[new_subject_category["id"]] = new_subject_category["name"]
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_subject_category_dict,
+ "admin", self.ref["id"], new_subject_categories)
+
+ # Delete the new subject_category
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_subject_category,
+ "admin", self.ref["id"], new_subject_category["id"])
+
+ # Add a particular subject_category
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_subject_category_dict,
+ "admin", self.ref["id"], new_subject_category["name"])
+
+ def test_object_categories(self):
+ self.create_intra_extension()
+
+ object_categories = self.manager.get_object_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(object_categories, dict)
+ self.assertIn("object_categories", object_categories)
+ self.assertIn("id", object_categories)
+ self.assertIn("intra_extension_uuid", object_categories)
+ self.assertEqual(self.ref["id"], object_categories["intra_extension_uuid"])
+ self.assertIsInstance(object_categories["object_categories"], dict)
+
+ new_object_category = {"id": uuid.uuid4().hex, "name": "object_category_test"}
+ new_object_categories = dict()
+ new_object_categories[new_object_category["id"]] = new_object_category["name"]
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_object_category_dict,
+ "admin", self.ref["id"], new_object_categories)
+
+ # Delete the new object_category
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_object_category,
+ "admin", self.ref["id"], new_object_category["id"])
+
+ # Add a particular object_category
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_object_category_dict,
+ "admin", self.ref["id"], new_object_category["name"])
+
+ def test_action_categories(self):
+ self.create_intra_extension()
+
+ action_categories = self.manager.get_action_category_dict("admin", self.ref["id"])
+ self.assertIsInstance(action_categories, dict)
+ self.assertIn("action_categories", action_categories)
+ self.assertIn("id", action_categories)
+ self.assertIn("intra_extension_uuid", action_categories)
+ self.assertEqual(self.ref["id"], action_categories["intra_extension_uuid"])
+ self.assertIsInstance(action_categories["action_categories"], dict)
+
+ new_action_category = {"id": uuid.uuid4().hex, "name": "action_category_test"}
+ new_action_categories = dict()
+ new_action_categories[new_action_category["id"]] = new_action_category["name"]
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_action_category_dict,
+ "admin", self.ref["id"], new_action_categories)
+
+ # Delete the new action_category
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_action_category,
+ "admin", self.ref["id"], new_action_category["id"])
+
+ # Add a particular action_category
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_action_category_dict,
+ "admin", self.ref["id"], new_action_category["name"])
+
+ def test_subject_category_scope(self):
+ self.create_intra_extension()
+
+ subject_categories = self.admin_manager.set_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ uuid.uuid4().hex: "admin",
+ uuid.uuid4().hex: "dev",
+ }
+ )
+
+ for subject_category in subject_categories["subject_categories"]:
+ subject_category_scope = self.manager.get_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(subject_category_scope["subject_category_scope"], dict)
+
+ new_subject_category_scope = dict()
+ new_subject_category_scope_uuid = uuid.uuid4().hex
+ new_subject_category_scope[new_subject_category_scope_uuid] = "new_subject_category_scope"
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_subject_category_scope_dict,
+ "admin", self.ref["id"], subject_category, new_subject_category_scope)
+
+ # Delete the new subject_category_scope
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_subject_category_scope,
+ "admin", self.ref["id"], subject_category, new_subject_category_scope_uuid)
+
+ # Add a particular subject_category_scope
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_subject_category_scope_dict,
+ "admin", self.ref["id"], subject_category, new_subject_category_scope[new_subject_category_scope_uuid])
+
+ def test_object_category_scope(self):
+ self.create_intra_extension()
+
+ object_categories = self.admin_manager.set_object_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ uuid.uuid4().hex: "id",
+ uuid.uuid4().hex: "domain",
+ }
+ )
+
+ for object_category in object_categories["object_categories"]:
+ object_category_scope = self.manager.get_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(object_category_scope["object_category_scope"], dict)
+
+ new_object_category_scope = dict()
+ new_object_category_scope_uuid = uuid.uuid4().hex
+ new_object_category_scope[new_object_category_scope_uuid] = "new_object_category_scope"
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_object_category_scope_dict,
+ "admin", self.ref["id"], object_category, new_object_category_scope)
+
+ # Delete the new object_category_scope
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_object_category_scope,
+ "admin", self.ref["id"], object_category, new_object_category_scope_uuid)
+
+ # Add a particular object_category_scope
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_object_category_scope_dict,
+ "admin", self.ref["id"], object_category, new_object_category_scope[new_object_category_scope_uuid])
+
+ def test_action_category_scope(self):
+ self.create_intra_extension()
+
+ action_categories = self.admin_manager.set_action_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ uuid.uuid4().hex: "compute",
+ uuid.uuid4().hex: "identity",
+ }
+ )
+
+ for action_category in action_categories["action_categories"]:
+ action_category_scope = self.manager.get_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(action_category_scope["action_category_scope"], dict)
+
+ new_action_category_scope = dict()
+ new_action_category_scope_uuid = uuid.uuid4().hex
+ new_action_category_scope[new_action_category_scope_uuid] = "new_action_category_scope"
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_action_category_scope_dict,
+ "admin", self.ref["id"], action_category, new_action_category_scope)
+
+ # Delete the new action_category_scope
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_action_category_scope,
+ "admin", self.ref["id"], action_category, new_action_category_scope_uuid)
+
+ # Add a particular action_category_scope
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_action_category_scope_dict,
+ "admin", self.ref["id"], action_category, new_action_category_scope[new_action_category_scope_uuid])
+
+ def test_subject_category_assignment(self):
+ self.create_intra_extension()
+
+ new_subject = self.create_user()
+ new_subjects = dict()
+ new_subjects[new_subject["id"]] = new_subject["name"]
+ subjects = self.admin_manager.set_subject_dict("admin", self.ref["id"], new_subjects)
+
+ new_subject_category_uuid = uuid.uuid4().hex
+ new_subject_category_value = "role"
+ subject_categories = self.admin_manager.set_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ new_subject_category_uuid: new_subject_category_value
+ }
+ )
+
+ for subject_category in subject_categories["subject_categories"]:
+ subject_category_scope = self.admin_manager.get_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(subject_category_scope["subject_category_scope"], dict)
+
+ new_subject_category_scope = dict()
+ new_subject_category_scope_uuid = uuid.uuid4().hex
+ new_subject_category_scope[new_subject_category_scope_uuid] = "admin"
+ subject_category_scope = self.admin_manager.set_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIn(new_subject_category_scope[new_subject_category_scope_uuid],
+ subject_category_scope["subject_category_scope"][subject_category].values())
+
+ new_subject_category_scope2 = dict()
+ new_subject_category_scope2_uuid = uuid.uuid4().hex
+ new_subject_category_scope2[new_subject_category_scope2_uuid] = "dev"
+ subject_category_scope = self.admin_manager.set_subject_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ subject_category,
+ new_subject_category_scope2)
+ self.assertIsInstance(subject_category_scope, dict)
+ self.assertIn("subject_category_scope", subject_category_scope)
+ self.assertIn("id", subject_category_scope)
+ self.assertIn("intra_extension_uuid", subject_category_scope)
+ self.assertEqual(self.ref["id"], subject_category_scope["intra_extension_uuid"])
+ self.assertIn(new_subject_category_scope2[new_subject_category_scope2_uuid],
+ subject_category_scope["subject_category_scope"][subject_category].values())
+
+ subject_category_assignments = self.manager.get_subject_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_subject["id"]
+ )
+ self.assertIsInstance(subject_category_assignments, dict)
+ self.assertIn("subject_category_assignments", subject_category_assignments)
+ self.assertIn("id", subject_category_assignments)
+ self.assertIn("intra_extension_uuid", subject_category_assignments)
+ self.assertEqual(self.ref["id"], subject_category_assignments["intra_extension_uuid"])
+ self.assertEqual({}, subject_category_assignments["subject_category_assignments"][new_subject["id"]])
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_subject_category_assignment_dict,
+ "admin", self.ref["id"], new_subject["id"],
+ {
+ new_subject_category_uuid: [new_subject_category_scope_uuid, new_subject_category_scope2_uuid],
+ })
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_subject_category_assignment,
+ "admin", self.ref["id"], new_subject["id"],
+ new_subject_category_uuid,
+ new_subject_category_scope_uuid)
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_subject_category_assignment_dict,
+ "admin", self.ref["id"], new_subject["id"],
+ new_subject_category_uuid,
+ new_subject_category_scope_uuid)
+
+ def test_object_category_assignment(self):
+ self.create_intra_extension()
+
+ new_object = self.create_user()
+ new_objects = dict()
+ new_objects[new_object["id"]] = new_object["name"]
+ objects = self.admin_manager.set_object_dict("admin", self.ref["id"], new_objects)
+
+ new_object_category_uuid = uuid.uuid4().hex
+ new_object_category_value = "role"
+ object_categories = self.admin_manager.set_object_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ new_object_category_uuid: new_object_category_value
+ }
+ )
+
+ for object_category in object_categories["object_categories"]:
+ object_category_scope = self.admin_manager.get_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(object_category_scope["object_category_scope"], dict)
+
+ new_object_category_scope = dict()
+ new_object_category_scope_uuid = uuid.uuid4().hex
+ new_object_category_scope[new_object_category_scope_uuid] = "admin"
+ object_category_scope = self.admin_manager.set_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIn(new_object_category_scope[new_object_category_scope_uuid],
+ object_category_scope["object_category_scope"][object_category].values())
+
+ new_object_category_scope2 = dict()
+ new_object_category_scope2_uuid = uuid.uuid4().hex
+ new_object_category_scope2[new_object_category_scope2_uuid] = "dev"
+ object_category_scope = self.admin_manager.set_object_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ object_category,
+ new_object_category_scope2)
+ self.assertIsInstance(object_category_scope, dict)
+ self.assertIn("object_category_scope", object_category_scope)
+ self.assertIn("id", object_category_scope)
+ self.assertIn("intra_extension_uuid", object_category_scope)
+ self.assertEqual(self.ref["id"], object_category_scope["intra_extension_uuid"])
+ self.assertIn(new_object_category_scope2[new_object_category_scope2_uuid],
+ object_category_scope["object_category_scope"][object_category].values())
+
+ object_category_assignments = self.manager.get_object_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_object["id"]
+ )
+ self.assertIsInstance(object_category_assignments, dict)
+ self.assertIn("object_category_assignments", object_category_assignments)
+ self.assertIn("id", object_category_assignments)
+ self.assertIn("intra_extension_uuid", object_category_assignments)
+ self.assertEqual(self.ref["id"], object_category_assignments["intra_extension_uuid"])
+ self.assertEqual({}, object_category_assignments["object_category_assignments"][new_object["id"]])
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_object_category_assignment_dict,
+ "admin", self.ref["id"], new_object["id"],
+ {
+ new_object_category_uuid: [new_object_category_scope_uuid, new_object_category_scope2_uuid],
+ })
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_object_category_assignment,
+ "admin", self.ref["id"], new_object["id"],
+ new_object_category_uuid,
+ new_object_category_scope_uuid)
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_object_category_assignment_dict,
+ "admin", self.ref["id"], new_object["id"],
+ new_object_category_uuid,
+ new_object_category_scope_uuid)
+
+ def test_action_category_assignment(self):
+ self.create_intra_extension()
+
+ new_action = self.create_user()
+ new_actions = dict()
+ new_actions[new_action["id"]] = new_action["name"]
+ actions = self.admin_manager.set_action_dict("admin", self.ref["id"], new_actions)
+
+ new_action_category_uuid = uuid.uuid4().hex
+ new_action_category_value = "role"
+ action_categories = self.admin_manager.set_action_category_dict(
+ "admin",
+ self.ref["id"],
+ {
+ new_action_category_uuid: new_action_category_value
+ }
+ )
+
+ for action_category in action_categories["action_categories"]:
+ action_category_scope = self.admin_manager.get_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIsInstance(action_category_scope["action_category_scope"], dict)
+
+ new_action_category_scope = dict()
+ new_action_category_scope_uuid = uuid.uuid4().hex
+ new_action_category_scope[new_action_category_scope_uuid] = "admin"
+ action_category_scope = self.admin_manager.set_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIn(new_action_category_scope[new_action_category_scope_uuid],
+ action_category_scope["action_category_scope"][action_category].values())
+
+ new_action_category_scope2 = dict()
+ new_action_category_scope2_uuid = uuid.uuid4().hex
+ new_action_category_scope2[new_action_category_scope2_uuid] = "dev"
+ action_category_scope = self.admin_manager.set_action_category_scope_dict(
+ "admin",
+ self.ref["id"],
+ action_category,
+ new_action_category_scope2)
+ self.assertIsInstance(action_category_scope, dict)
+ self.assertIn("action_category_scope", action_category_scope)
+ self.assertIn("id", action_category_scope)
+ self.assertIn("intra_extension_uuid", action_category_scope)
+ self.assertEqual(self.ref["id"], action_category_scope["intra_extension_uuid"])
+ self.assertIn(new_action_category_scope2[new_action_category_scope2_uuid],
+ action_category_scope["action_category_scope"][action_category].values())
+
+ action_category_assignments = self.manager.get_action_category_assignment_dict(
+ "admin",
+ self.ref["id"],
+ new_action["id"]
+ )
+ self.assertIsInstance(action_category_assignments, dict)
+ self.assertIn("action_category_assignments", action_category_assignments)
+ self.assertIn("id", action_category_assignments)
+ self.assertIn("intra_extension_uuid", action_category_assignments)
+ self.assertEqual(self.ref["id"], action_category_assignments["intra_extension_uuid"])
+ self.assertEqual({}, action_category_assignments["action_category_assignments"][new_action["id"]])
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_action_category_assignment_dict,
+ "admin", self.ref["id"], new_action["id"],
+ {
+ new_action_category_uuid: [new_action_category_scope_uuid, new_action_category_scope2_uuid],
+ })
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.del_action_category_assignment,
+ "admin", self.ref["id"], new_action["id"],
+ new_action_category_uuid,
+ new_action_category_scope_uuid)
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.add_action_category_assignment_dict,
+ "admin", self.ref["id"], new_action["id"],
+ new_action_category_uuid,
+ new_action_category_scope_uuid)
+
+ def test_sub_meta_rules(self):
+ self.create_intra_extension()
+
+ aggregation_algorithms = self.manager.get_aggregation_algorithms("admin", self.ref["id"])
+ self.assertIsInstance(aggregation_algorithms, dict)
+ self.assertIsInstance(aggregation_algorithms["aggregation_algorithms"], list)
+ self.assertIn("and_true_aggregation", aggregation_algorithms["aggregation_algorithms"])
+ self.assertIn("test_aggregation", aggregation_algorithms["aggregation_algorithms"])
+
+ aggregation_algorithm = self.manager.get_aggregation_algorithm("admin", self.ref["id"])
+ self.assertIsInstance(aggregation_algorithm, dict)
+ self.assertIn("aggregation", aggregation_algorithm)
+ self.assertIn(aggregation_algorithm["aggregation"], aggregation_algorithms["aggregation_algorithms"])
+
+ _aggregation_algorithm = list(aggregation_algorithms["aggregation_algorithms"])
+ _aggregation_algorithm.remove(aggregation_algorithm["aggregation"])
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_aggregation_algorithm,
+ "admin", self.ref["id"], _aggregation_algorithm[0])
+
+ sub_meta_rules = self.manager.get_sub_meta_rule("admin", self.ref["id"])
+ self.assertIsInstance(sub_meta_rules, dict)
+ self.assertIn("sub_meta_rules", sub_meta_rules)
+        with open(os.path.join(self.policy_directory, self.ref["model"], "metarule.json")) as metarule_file:
+            sub_meta_rules_conf = json.load(metarule_file)
+ metarule = dict()
+ categories = {
+ "subject_categories": self.manager.get_subject_category_dict("admin", self.ref["id"]),
+ "object_categories": self.manager.get_object_category_dict("admin", self.ref["id"]),
+ "action_categories": self.manager.get_action_category_dict("admin", self.ref["id"])
+ }
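+        # As in the admin tests: rebuild the metarule from metarule.json, converting category names to UUIDs.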
+ for relation in sub_meta_rules_conf["sub_meta_rules"]:
+ metarule[relation] = dict()
+ for item in ("subject_categories", "object_categories", "action_categories"):
+ metarule[relation][item] = list()
+ for element in sub_meta_rules_conf["sub_meta_rules"][relation][item]:
+ metarule[relation][item].append(self.__get_key_from_value(
+ element,
+ categories[item][item]
+ ))
+
+ for relation in sub_meta_rules["sub_meta_rules"]:
+ self.assertIn(relation, metarule)
+ for item in ("subject_categories", "object_categories", "action_categories"):
+ self.assertEqual(
+ sub_meta_rules["sub_meta_rules"][relation][item],
+ metarule[relation][item]
+ )
+
+ new_subject_category = {"id": uuid.uuid4().hex, "name": "subject_category_test"}
+ # Add a particular subject_category
+ data = self.admin_manager.add_subject_category_dict(
+ "admin",
+ self.ref["id"],
+ new_subject_category["name"])
+ new_subject_category["id"] = data["subject_category"]["uuid"]
+ subject_categories = self.manager.get_subject_category_dict(
+ "admin",
+ self.ref["id"])
+ self.assertIsInstance(subject_categories, dict)
+ self.assertIn("subject_categories", subject_categories)
+ self.assertIn("id", subject_categories)
+ self.assertIn("intra_extension_uuid", subject_categories)
+ self.assertEqual(self.ref["id"], subject_categories["intra_extension_uuid"])
+ self.assertIn(new_subject_category["id"], subject_categories["subject_categories"])
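+        # Reuse the last `relation` from the loop above; even this extension must be refused by the authz manager.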
+ metarule[relation]["subject_categories"].append(new_subject_category["id"])
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_sub_meta_rule,
+ "admin", self.ref["id"], metarule)
+
+ def test_sub_rules(self):
+ self.create_intra_extension()
+
+ sub_meta_rules = self.manager.get_sub_meta_rule("admin", self.ref["id"])
+ self.assertIsInstance(sub_meta_rules, dict)
+ self.assertIn("sub_meta_rules", sub_meta_rules)
+
+ sub_rules = self.manager.get_sub_rules("admin", self.ref["id"])
+ self.assertIsInstance(sub_rules, dict)
+ self.assertIn("rules", sub_rules)
+ rules = dict()
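+        # Rules are flat lists of scope values (subject, action, object order) ending with a boolean decision flag.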
+ for relation in sub_rules["rules"]:
+ self.assertIn(relation, self.manager.get_sub_meta_rule_relations("admin", self.ref["id"])["sub_meta_rule_relations"])
+ rules[relation] = list()
+ for rule in sub_rules["rules"][relation]:
+ for cat, cat_func, func_name in (
+ ("subject_categories", self.manager.get_subject_category_scope_dict, "subject_category_scope"),
+ ("action_categories", self.manager.get_action_category_scope_dict, "action_category_scope"),
+ ("object_categories", self.manager.get_object_category_scope_dict, "object_category_scope"),
+ ):
+ for cat_value in sub_meta_rules["sub_meta_rules"][relation][cat]:
+ scope = cat_func(
+ "admin",
+ self.ref["id"],
+ cat_value
+ )
+ a_scope = rule.pop(0)
+ self.assertIn(a_scope, scope[func_name][cat_value])
+
+        # Build a new sub-rule from the first scope value of each category; setting it must be refused.
+
+ relation = sub_rules["rules"].keys()[0]
+ sub_rule = []
+ for cat, cat_func, func_name in (
+ ("subject_categories", self.manager.get_subject_category_scope_dict, "subject_category_scope"),
+ ("action_categories", self.manager.get_action_category_scope_dict, "action_category_scope"),
+ ("object_categories", self.manager.get_object_category_scope_dict, "object_category_scope"),
+ ):
+ for cat_value in sub_meta_rules["sub_meta_rules"][relation][cat]:
+ scope = cat_func(
+ "admin",
+ self.ref["id"],
+ cat_value
+ )
+ sub_rule.append(scope[func_name][cat_value].keys()[0])
+
+ self.assertRaises(
+ AuthIntraExtensionModificationNotAuthorized,
+ self.manager.set_sub_rule,
+ "admin", self.ref["id"], relation, sub_rule)
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py
new file mode 100644
index 00000000..1b678d53
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py
@@ -0,0 +1,4 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py
new file mode 100644
index 00000000..d9c17bd5
--- /dev/null
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py
@@ -0,0 +1,162 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+"""Unit tests for core tenant."""
+
+import uuid
+from oslo_config import cfg
+from keystone.tests import unit as tests
+from keystone.contrib.moon.core import TenantManager
+from keystone.tests.unit.ksfixtures import database
+from keystone.contrib.moon.exception import *
+from keystone.tests.unit import default_fixtures
+from keystone.contrib.moon.core import LogManager
+
+CONF = cfg.CONF
+
+
+class TestTenantManager(tests.TestCase):
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(TestTenantManager, self).setUp()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.manager = TenantManager()
+
+ def load_extra_backends(self):
+ return {
+ "moonlog_api": LogManager()
+ }
+
+ def config_overrides(self):
+ super(TestTenantManager, self).config_overrides()
+ self.config_fixture.config(
+ group='moon',
+ tenant_driver='keystone.contrib.moon.backends.sql.TenantConnector')
+
+ def test_add_tenant(self):
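+        # Round trip: one tenant UUID maps to both an authz and an admin extension, and each extension UUID resolves back to the tenant.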
+ _uuid = uuid.uuid4().hex
+ new_mapping = {
+ _uuid: {
+ "name": uuid.uuid4().hex,
+ "authz": uuid.uuid4().hex,
+ "admin": uuid.uuid4().hex,
+ }
+ }
+ data = self.manager.set_tenant_dict(
+ tenant_uuid=_uuid,
+ name=new_mapping[_uuid]["name"],
+ authz_extension_uuid=new_mapping[_uuid]["authz"],
+ admin_extension_uuid=new_mapping[_uuid]["admin"]
+ )
+ self.assertEquals(_uuid, data["id"])
+ self.assertEquals(data["name"], new_mapping[_uuid]["name"])
+ self.assertEquals(data["authz"], new_mapping[_uuid]["authz"])
+ self.assertEquals(data["admin"], new_mapping[_uuid]["admin"])
+ data = self.manager.get_tenant_dict()
+ self.assertNotEqual(data, {})
+ data = self.manager.get_tenant_uuid(new_mapping[_uuid]["authz"])
+ self.assertEquals(_uuid, data)
+ data = self.manager.get_tenant_uuid(new_mapping[_uuid]["admin"])
+ self.assertEquals(_uuid, data)
+ data = self.manager.get_admin_extension_uuid(new_mapping[_uuid]["authz"])
+ self.assertEquals(new_mapping[_uuid]["admin"], data)
+
+ def test_tenant_list_empty(self):
+ data = self.manager.get_tenant_dict()
+ self.assertEqual(data, {})
+
+ def test_set_tenant_name(self):
+ _uuid = uuid.uuid4().hex
+ new_mapping = {
+ _uuid: {
+ "name": uuid.uuid4().hex,
+ "authz": uuid.uuid4().hex,
+ "admin": uuid.uuid4().hex,
+ }
+ }
+ data = self.manager.set_tenant_dict(
+ tenant_uuid=_uuid,
+ name=new_mapping[_uuid]["name"],
+ authz_extension_uuid=new_mapping[_uuid]["authz"],
+ admin_extension_uuid=new_mapping[_uuid]["admin"]
+ )
+ self.assertEquals(_uuid, data["id"])
+ self.assertEquals(data["name"], new_mapping[_uuid]["name"])
+ data = self.manager.set_tenant_name(_uuid, "new name")
+ self.assertEquals(_uuid, data["id"])
+ self.assertEquals(data["name"], "new name")
+ data = self.manager.get_tenant_name(_uuid)
+ self.assertEquals(data, "new name")
+
+ def test_delete_tenant(self):
+ _uuid = uuid.uuid4().hex
+ new_mapping = {
+ _uuid: {
+ "name": uuid.uuid4().hex,
+ "authz": uuid.uuid4().hex,
+ "admin": uuid.uuid4().hex,
+ }
+ }
+ data = self.manager.set_tenant_dict(
+ tenant_uuid=_uuid,
+ name=new_mapping[_uuid]["name"],
+ authz_extension_uuid=new_mapping[_uuid]["authz"],
+ admin_extension_uuid=new_mapping[_uuid]["admin"]
+ )
+ self.assertEquals(_uuid, data["id"])
+ self.assertEquals(data["name"], new_mapping[_uuid]["name"])
+ self.assertEquals(data["authz"], new_mapping[_uuid]["authz"])
+ self.assertEquals(data["admin"], new_mapping[_uuid]["admin"])
+ data = self.manager.get_tenant_dict()
+ self.assertNotEqual(data, {})
+ self.manager.delete(new_mapping[_uuid]["authz"])
+ data = self.manager.get_tenant_dict()
+ self.assertEqual(data, {})
+
+ def test_get_extension_uuid(self):
+ _uuid = uuid.uuid4().hex
+ new_mapping = {
+ _uuid: {
+ "name": uuid.uuid4().hex,
+ "authz": uuid.uuid4().hex,
+ "admin": uuid.uuid4().hex,
+ }
+ }
+ data = self.manager.set_tenant_dict(
+ tenant_uuid=_uuid,
+ name=new_mapping[_uuid]["name"],
+ authz_extension_uuid=new_mapping[_uuid]["authz"],
+ admin_extension_uuid=new_mapping[_uuid]["admin"]
+ )
+ self.assertEquals(_uuid, data["id"])
+ data = self.manager.get_extension_uuid(_uuid)
+ self.assertEqual(data, new_mapping[_uuid]["authz"])
+ data = self.manager.get_extension_uuid(_uuid, "admin")
+ self.assertEqual(data, new_mapping[_uuid]["admin"])
+
+    def test_unknown_tenant_uuid(self):
+ self.assertRaises(TenantNotFoundError, self.manager.get_tenant_name, uuid.uuid4().hex)
+ self.assertRaises(TenantNotFoundError, self.manager.set_tenant_name, uuid.uuid4().hex, "new name")
+ self.assertRaises(TenantNotFoundError, self.manager.get_extension_uuid, uuid.uuid4().hex)
+ _uuid = uuid.uuid4().hex
+ new_mapping = {
+ _uuid: {
+ "name": uuid.uuid4().hex,
+ "authz": uuid.uuid4().hex,
+ "admin": uuid.uuid4().hex,
+ }
+ }
+ data = self.manager.set_tenant_dict(
+ tenant_uuid=_uuid,
+ name=new_mapping[_uuid]["name"],
+ authz_extension_uuid=new_mapping[_uuid]["authz"],
+ admin_extension_uuid=""
+ )
+ self.assertEquals(_uuid, data["id"])
+ self.assertRaises(IntraExtensionNotFound, self.manager.get_extension_uuid, _uuid, "admin")
+ self.assertRaises(TenantNotFoundError, self.manager.get_tenant_uuid, uuid.uuid4().hex)
+ # self.assertRaises(AdminIntraExtensionNotFound, self.manager.get_admin_extension_uuid, uuid.uuid4().hex)
diff --git a/keystone-moon/keystone/tests/unit/__init__.py b/keystone-moon/keystone/tests/unit/__init__.py
new file mode 100644
index 00000000..c97ce253
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/__init__.py
@@ -0,0 +1,41 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import oslo_i18n
+import six
+
+
+if six.PY3:
+ # NOTE(dstanek): This block will monkey patch libraries that are not
+ # yet supported in Python3. We do this that that it is possible to
+ # execute any tests at all. Without monkey patching modules the
+ # tests will fail with import errors.
+
+ import sys
+ from unittest import mock # noqa: our import detection is naive?
+
+ sys.modules['eventlet'] = mock.Mock()
+ sys.modules['eventlet.green'] = mock.Mock()
+ sys.modules['eventlet.wsgi'] = mock.Mock()
+ sys.modules['oslo'].messaging = mock.Mock()
+ sys.modules['pycadf'] = mock.Mock()
+ sys.modules['paste'] = mock.Mock()
+
+# NOTE(dstanek): oslo_i18n.enable_lazy() must be called before
+# keystone.i18n._() is called to ensure it has the desired lazy lookup
+# behavior. This includes cases, like keystone.exceptions, where
+# keystone.i18n._() is called at import time.
+oslo_i18n.enable_lazy()
+
+from keystone.tests.unit.core import * # noqa
diff --git a/keystone-moon/keystone/tests/unit/backend/__init__.py b/keystone-moon/keystone/tests/unit/backend/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/core_ldap.py b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
new file mode 100644
index 00000000..9d6b23e1
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
@@ -0,0 +1,161 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ldap
+
+from oslo_config import cfg
+
+from keystone.common import cache
+from keystone.common import ldap as common_ldap
+from keystone.common.ldap import core as common_ldap_core
+from keystone.common import sql
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import fakeldap
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+
+
+def create_group_container(identity_api):
+ # Create the groups base entry (ou=Groups,cn=example,cn=com)
+ group_api = identity_api.driver.group
+ conn = group_api.get_connection()
+ dn = 'ou=Groups,cn=example,cn=com'
+ conn.add_s(dn, [('objectclass', ['organizationalUnit']),
+ ('ou', ['Groups'])])
+
+
+class BaseBackendLdapCommon(object):
+ """Mixin class to set up generic LDAP backends."""
+
+ def setUp(self):
+ super(BaseBackendLdapCommon, self).setUp()
+
+ common_ldap.register_handler('fake://', fakeldap.FakeLdap)
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.addCleanup(common_ldap_core._HANDLERS.clear)
+ self.addCleanup(self.clear_database)
+
+ def _get_domain_fixture(self):
+ """Domains in LDAP are read-only, so just return the static one."""
+ return self.resource_api.get_domain(CONF.identity.default_domain_id)
+
+ def clear_database(self):
+ for shelf in fakeldap.FakeShelves:
+ fakeldap.FakeShelves[shelf].clear()
+
+ def reload_backends(self, domain_id):
+ # Only one backend unless we are using separate domain backends
+ self.load_backends()
+
+ def get_config(self, domain_id):
+ # Only one conf structure unless we are using separate domain backends
+ return CONF
+
+ def config_overrides(self):
+ super(BaseBackendLdapCommon, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def config_files(self):
+ config_files = super(BaseBackendLdapCommon, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ def get_user_enabled_vals(self, user):
+ user_dn = (
+ self.identity_api.driver.user._id_to_dn_string(user['id']))
+ enabled_attr_name = CONF.ldap.user_enabled_attribute
+
+ ldap_ = self.identity_api.driver.user.get_connection()
+ res = ldap_.search_s(user_dn,
+ ldap.SCOPE_BASE,
+ u'(sn=%s)' % user['name'])
+ if enabled_attr_name in res[0][1]:
+ return res[0][1][enabled_attr_name]
+ else:
+ return None
+
+
+class BaseBackendLdap(object):
+ """Mixin class to set up an all-LDAP configuration."""
+ def setUp(self):
+ # NOTE(dstanek): The database must be set up prior to calling the
+ # parent's setUp. The parent's setUp uses services (like
+ # credentials) that require a database.
+ self.useFixture(database.Database())
+ super(BaseBackendLdap, self).setUp()
+
+ def load_fixtures(self, fixtures):
+ # Override the parent impl since we need to create the group container.
+ create_group_container(self.identity_api)
+ super(BaseBackendLdap, self).load_fixtures(fixtures)
+
+
+class BaseBackendLdapIdentitySqlEverythingElse(tests.SQLDriverOverrides):
+ """Mixin base for Identity LDAP, everything else SQL backend tests."""
+
+ def config_files(self):
+ config_files = super(BaseBackendLdapIdentitySqlEverythingElse,
+ self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap_sql.conf'))
+ return config_files
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(BaseBackendLdapIdentitySqlEverythingElse, self).setUp()
+ self.clear_database()
+ self.load_backends()
+ cache.configure_cache_region(cache.REGION)
+ self.engine = sql.get_engine()
+ self.addCleanup(sql.cleanup)
+
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+
+ self.load_fixtures(default_fixtures)
+ # defaulted by the data load
+ self.user_foo['enabled'] = True
+
+ def config_overrides(self):
+ super(BaseBackendLdapIdentitySqlEverythingElse,
+ self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(
+ group='resource',
+ driver='keystone.resource.backends.sql.Resource')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+
+
+class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object):
+ """Mixin base class to test mapping of default LDAP backend.
+
+ The default configuration is not to enable mapping when using a single
+ backend LDAP driver. However, a cloud provider might want to enable
+ the mapping, hence hiding the LDAP IDs from any clients of keystone.
+ Setting backward_compatible_ids to False will enable this mapping.
+
+ """
+ def config_overrides(self):
+ super(BaseBackendLdapIdentitySqlEverythingElseWithMapping,
+ self).config_overrides()
+ self.config_fixture.config(group='identity_mapping',
+ backward_compatible_ids=False)
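The classes above are mixins and never extend tests.TestCase directly; a concrete test case combines them, as the role tests later in this patch do. A sketch of the intended composition (class name hypothetical):

    class ExampleAllLdapTests(BaseBackendLdapCommon, BaseBackendLdap,
                              tests.TestCase):
        # Under this MRO, BaseBackendLdapCommon.setUp runs first; its
        # super() call reaches BaseBackendLdap.setUp, which installs the
        # database fixture before handler registration and fixture
        # loading resume in BaseBackendLdapCommon.
        pass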
diff --git a/keystone-moon/keystone/tests/unit/backend/core_sql.py b/keystone-moon/keystone/tests/unit/backend/core_sql.py
new file mode 100644
index 00000000..9cbd858e
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/core_sql.py
@@ -0,0 +1,53 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy
+
+from keystone.common import sql
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+
+
+class BaseBackendSqlTests(tests.SQLDriverOverrides, tests.TestCase):
+
+ def setUp(self):
+ super(BaseBackendSqlTests, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+
+ # populate the engine with tables & fixtures
+ self.load_fixtures(default_fixtures)
+ # defaulted by the data load
+ self.user_foo['enabled'] = True
+
+ def config_files(self):
+ config_files = super(BaseBackendSqlTests, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+
+class BaseBackendSqlModels(BaseBackendSqlTests):
+
+ def select_table(self, name):
+ table = sqlalchemy.Table(name,
+ sql.ModelBase.metadata,
+ autoload=True)
+ s = sqlalchemy.select([table])
+ return s
+
+ def assertExpectedSchema(self, table, cols):
+ table = self.select_table(table)
+ for col, type_, length in cols:
+ self.assertIsInstance(table.c[col].type, type_)
+ if length:
+ self.assertEqual(length, table.c[col].type.length)
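assertExpectedSchema reflects the named table from the live metadata and checks each (column, SQLAlchemy type class, length-or-None) triple against it. A hypothetical usage sketch (table and columns invented; real uses appear in the domain-config and role SQL tests below):

    class ExampleModels(BaseBackendSqlModels):

        def test_example_model(self):
            cols = (('id', sql.String, 64),         # VARCHAR(64)
                    ('extra', sql.JsonBlob, None))  # no length to assert
            self.assertExpectedSchema('example_table', cols)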
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py b/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py b/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
new file mode 100644
index 00000000..da2e9bd9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
@@ -0,0 +1,523 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+import mock
+from testtools import matchers
+
+from keystone import exception
+
+
+class DomainConfigTests(object):
+
+ def setUp(self):
+ self.domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(self.domain['id'], self.domain)
+ self.addCleanup(self.clean_up_domain)
+
+ def clean_up_domain(self):
+ # NOTE(henry-nash): Deleting the domain will also delete any domain
+ # configs for this domain.
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+ self.resource_api.delete_domain(self.domain['id'])
+ del self.domain
+
+ def _domain_config_crud(self, sensitive):
+ group = uuid.uuid4().hex
+ option = uuid.uuid4().hex
+ value = uuid.uuid4().hex
+ self.domain_config_api.create_config_option(
+ self.domain['id'], group, option, value, sensitive)
+ res = self.domain_config_api.get_config_option(
+ self.domain['id'], group, option, sensitive)
+ config = {'group': group, 'option': option, 'value': value}
+ self.assertEqual(config, res)
+
+ value = uuid.uuid4().hex
+ self.domain_config_api.update_config_option(
+ self.domain['id'], group, option, value, sensitive)
+ res = self.domain_config_api.get_config_option(
+ self.domain['id'], group, option, sensitive)
+ config = {'group': group, 'option': option, 'value': value}
+ self.assertEqual(config, res)
+
+ self.domain_config_api.delete_config_options(
+ self.domain['id'], group, option, sensitive)
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config_option,
+ self.domain['id'], group, option, sensitive)
+ # ...and deleting it again is silently ignored
+ self.domain_config_api.delete_config_options(
+ self.domain['id'], group, option, sensitive)
+
+ def test_whitelisted_domain_config_crud(self):
+ self._domain_config_crud(sensitive=False)
+
+ def test_sensitive_domain_config_crud(self):
+ self._domain_config_crud(sensitive=True)
+
+ def _list_domain_config(self, sensitive):
+ """Test listing by combination of domain, group & option."""
+
+ config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ # Put config2 in the same group as config1
+ config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': 100}
+ for config in [config1, config2, config3]:
+ self.domain_config_api.create_config_option(
+ self.domain['id'], config['group'], config['option'],
+ config['value'], sensitive)
+
+ # Try listing all items from a domain
+ res = self.domain_config_api.list_config_options(
+ self.domain['id'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(3))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config2, config3])
+
+ # Try listing by domain and group
+ res = self.domain_config_api.list_config_options(
+ self.domain['id'], group=config1['group'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(2))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config2])
+
+ # Try listing by domain, group and option
+ res = self.domain_config_api.list_config_options(
+ self.domain['id'], group=config2['group'],
+ option=config2['option'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(1))
+ self.assertEqual(config2, res[0])
+
+ def test_list_whitelisted_domain_config_crud(self):
+ self._list_domain_config(False)
+
+ def test_list_sensitive_domain_config_crud(self):
+ self._list_domain_config(True)
+
+ def _delete_domain_configs(self, sensitive):
+ """Test deleting by combination of domain, group & option."""
+
+ config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ # Put config2 and config3 in the same group as config1
+ config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ config3 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ for config in [config1, config2, config3, config4]:
+ self.domain_config_api.create_config_option(
+ self.domain['id'], config['group'], config['option'],
+ config['value'], sensitive)
+
+ # Try deleting by domain, group and option
+ res = self.domain_config_api.delete_config_options(
+ self.domain['id'], group=config2['group'],
+ option=config2['option'], sensitive=sensitive)
+ res = self.domain_config_api.list_config_options(
+ self.domain['id'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(3))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config3, config4])
+
+ # Try deleting by domain and group
+ res = self.domain_config_api.delete_config_options(
+ self.domain['id'], group=config4['group'], sensitive=sensitive)
+ res = self.domain_config_api.list_config_options(
+ self.domain['id'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(2))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config3])
+
+ # Try deleting all items from a domain
+ res = self.domain_config_api.delete_config_options(
+ self.domain['id'], sensitive=sensitive)
+ res = self.domain_config_api.list_config_options(
+ self.domain['id'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(0))
+
+ def test_delete_whitelisted_domain_configs(self):
+ self._delete_domain_configs(False)
+
+ def test_delete_sensitive_domain_configs(self):
+ self._delete_domain_configs(True)
+
+ def _create_domain_config_twice(self, sensitive):
+ """Test conflict error thrown if create the same option twice."""
+
+ config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+
+ self.domain_config_api.create_config_option(
+ self.domain['id'], config['group'], config['option'],
+ config['value'], sensitive=sensitive)
+ self.assertRaises(exception.Conflict,
+ self.domain_config_api.create_config_option,
+ self.domain['id'], config['group'], config['option'],
+ config['value'], sensitive=sensitive)
+
+ def test_create_whitelisted_domain_config_twice(self):
+ self._create_domain_config_twice(False)
+
+ def test_create_sensitive_domain_config_twice(self):
+ self._create_domain_config_twice(True)
+
+ def test_delete_domain_deletes_configs(self):
+ """Test domain deletion clears the domain configs."""
+
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ # Put config2 in the same group as config1
+ config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ self.domain_config_api.create_config_option(
+ domain['id'], config1['group'], config1['option'],
+ config1['value'])
+ self.domain_config_api.create_config_option(
+ domain['id'], config2['group'], config2['option'],
+ config2['value'], sensitive=True)
+ res = self.domain_config_api.list_config_options(
+ domain['id'])
+ self.assertThat(res, matchers.HasLength(1))
+ res = self.domain_config_api.list_config_options(
+ domain['id'], sensitive=True)
+ self.assertThat(res, matchers.HasLength(1))
+
+ # Now delete the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+ self.resource_api.delete_domain(domain['id'])
+
+ # Check domain configs have also been deleted
+ res = self.domain_config_api.list_config_options(
+ domain['id'])
+ self.assertThat(res, matchers.HasLength(0))
+ res = self.domain_config_api.list_config_options(
+ domain['id'], sensitive=True)
+ self.assertThat(res, matchers.HasLength(0))
+
+ def test_create_domain_config_including_sensitive_option(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # password is sensitive, so check that the whitelisted portion and
+ # the sensitive piece have been stored in the appropriate locations.
+ res = self.domain_config_api.get_config(self.domain['id'])
+ config_whitelisted = copy.deepcopy(config)
+ config_whitelisted['ldap'].pop('password')
+ self.assertEqual(config_whitelisted, res)
+ res = self.domain_config_api.get_config_option(
+ self.domain['id'], 'ldap', 'password', sensitive=True)
+ self.assertEqual(config['ldap']['password'], res['value'])
+
+ # Finally, use the non-public API to get back the whole config
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(config, res)
+
+ def test_get_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ res = self.domain_config_api.get_config(self.domain['id'],
+ group='identity')
+ config_partial = copy.deepcopy(config)
+ config_partial.pop('ldap')
+ self.assertEqual(config_partial, res)
+ res = self.domain_config_api.get_config(
+ self.domain['id'], group='ldap', option='user_tree_dn')
+ self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res)
+ # ...but we should fail to get a sensitive option
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'],
+ group='ldap', option='password')
+
+ def test_delete_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ self.domain_config_api.delete_config(
+ self.domain['id'], group='identity')
+ config_partial = copy.deepcopy(config)
+ config_partial.pop('identity')
+ config_partial['ldap'].pop('password')
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(config_partial, res)
+
+ self.domain_config_api.delete_config(
+ self.domain['id'], group='ldap', option='url')
+ config_partial = copy.deepcopy(config_partial)
+ config_partial['ldap'].pop('url')
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(config_partial, res)
+
+ def test_get_options_not_in_domain_config(self):
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'])
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'],
+ group='identity')
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'],
+ group='ldap', option='user_tree_dn')
+
+ def test_get_sensitive_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual({}, res)
+ self.domain_config_api.create_config(self.domain['id'], config)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(config, res)
+
+ def test_update_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # Try updating a group
+ new_config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_filter': uuid.uuid4().hex}}
+ res = self.domain_config_api.update_config(
+ self.domain['id'], new_config, group='ldap')
+ expected_config = copy.deepcopy(config)
+ expected_config['ldap']['url'] = new_config['ldap']['url']
+ expected_config['ldap']['user_filter'] = (
+ new_config['ldap']['user_filter'])
+ expected_full_config = copy.deepcopy(expected_config)
+ expected_config['ldap'].pop('password')
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(expected_config, res)
+ # The sensitive option should still exist
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(expected_full_config, res)
+
+ # Try updating a single whitelisted option
+ self.domain_config_api.delete_config(self.domain['id'])
+ self.domain_config_api.create_config(self.domain['id'], config)
+ new_config = {'url': uuid.uuid4().hex}
+ res = self.domain_config_api.update_config(
+ self.domain['id'], new_config, group='ldap', option='url')
+
+ # Make sure whitelisted and full config is updated
+ expected_whitelisted_config = copy.deepcopy(config)
+ expected_whitelisted_config['ldap']['url'] = new_config['url']
+ expected_full_config = copy.deepcopy(expected_whitelisted_config)
+ expected_whitelisted_config['ldap'].pop('password')
+ self.assertEqual(expected_whitelisted_config, res)
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(expected_whitelisted_config, res)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(expected_full_config, res)
+
+ # Try updating a single sensitive option
+ self.domain_config_api.delete_config(self.domain['id'])
+ self.domain_config_api.create_config(self.domain['id'], config)
+ new_config = {'password': uuid.uuid4().hex}
+ res = self.domain_config_api.update_config(
+ self.domain['id'], new_config, group='ldap', option='password')
+ # The whitelisted config should not have changed...
+ expected_whitelisted_config = copy.deepcopy(config)
+ expected_full_config = copy.deepcopy(config)
+ expected_whitelisted_config['ldap'].pop('password')
+ self.assertEqual(expected_whitelisted_config, res)
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(expected_whitelisted_config, res)
+ expected_full_config['ldap']['password'] = new_config['password']
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ # ...but the sensitive piece should have.
+ self.assertEqual(expected_full_config, res)
+
+ def test_update_invalid_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ # An extra group, when specifying a single group, should fail
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group='ldap')
+ # An extra option, when specifying a single option, should fail
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config['ldap'],
+ group='ldap', option='url')
+
+ # Now try the right number of groups/options, but just not
+ # ones that are in the config provided
+ config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group='identity')
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config['ldap'], group='ldap',
+ option='url')
+
+ # Now some valid groups/options, but just not ones that are in the
+ # existing config
+ config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}}
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.update_config,
+ self.domain['id'], config_wrong_group,
+ group='identity')
+ config_wrong_option = {'url': uuid.uuid4().hex}
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.update_config,
+ self.domain['id'], config_wrong_option,
+ group='ldap', option='url')
+
+ # And finally just some bad groups/options
+ bad_group = uuid.uuid4().hex
+ config = {bad_group: {'user': uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group=bad_group,
+ option='user')
+ bad_option = uuid.uuid4().hex
+ config = {'ldap': {bad_option: uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group='ldap',
+ option=bad_option)
+
+ def test_create_invalid_domain_config(self):
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], {})
+ config = {uuid.uuid4().hex: uuid.uuid4().hex}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+ config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+ config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+ # Try an option that IS in the standard conf, but is neither
+ # whitelisted nor marked as sensitive
+ config = {'ldap': {'role_tree_dn': uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+
+ def test_delete_invalid_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ # Try deleting a group not in the config
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.delete_config,
+ self.domain['id'], group='identity')
+ # Try deleting an option not in the config
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.delete_config,
+ self.domain['id'],
+ group='ldap', option='user_tree_dn')
+
+ def test_sensitive_substitution_in_domain_config(self):
+ # Create a config that contains a whitelisted option that requires
+ # substitution of a sensitive option.
+ config = {'ldap': {'url': 'my_url/%(password)s',
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # Read back the config with the internal method and ensure that the
+ # substitution has taken place.
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ expected_url = (
+ config['ldap']['url'] % {'password': config['ldap']['password']})
+ self.assertEqual(expected_url, res['ldap']['url'])
+
+ def test_invalid_sensitive_substitution_in_domain_config(self):
+ """Check that invalid substitutions raise warnings."""
+
+ mock_log = mock.Mock()
+
+ invalid_option_config = {
+ 'ldap': {'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+
+ for invalid_option in ['my_url/%(passssword)s',
+ 'my_url/%(password',
+ 'my_url/%(password)',
+ 'my_url/%(password)d']:
+ invalid_option_config['ldap']['url'] = invalid_option
+ self.domain_config_api.create_config(
+ self.domain['id'], invalid_option_config)
+
+ with mock.patch('keystone.resource.core.LOG', mock_log):
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ mock_log.warn.assert_any_call(mock.ANY)
+ self.assertEqual(
+ invalid_option_config['ldap']['url'], res['ldap']['url'])
+
+ def test_escaped_sequence_in_domain_config(self):
+ """Check that escaped '%(' doesn't get interpreted."""
+
+ mock_log = mock.Mock()
+
+ escaped_option_config = {
+ 'ldap': {'url': 'my_url/%%(password)s',
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+
+ self.domain_config_api.create_config(
+ self.domain['id'], escaped_option_config)
+
+ with mock.patch('keystone.resource.core.LOG', mock_log):
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertFalse(mock_log.warn.called)
+ # The escaping '%' should have been removed
+ self.assertEqual('my_url/%(password)s', res['ldap']['url'])
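The substitution and escaping semantics exercised by the last two tests are plain Python %-formatting, shown here in isolation (a standalone sketch, not keystone code):

    password = 'secret'
    # A sensitive value is folded into a whitelisted option at read time:
    assert 'my_url/%(password)s' % {'password': password} == 'my_url/secret'
    # Doubling the '%' escapes the marker, so '%%(' comes out as '%(':
    assert ('my_url/%%(password)s' % {'password': password}
            == 'my_url/%(password)s')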
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py b/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py
new file mode 100644
index 00000000..6459ede1
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py
@@ -0,0 +1,41 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from keystone.common import sql
+from keystone.tests.unit.backend import core_sql
+from keystone.tests.unit.backend.domain_config import core
+
+
+class SqlDomainConfigModels(core_sql.BaseBackendSqlModels):
+
+ def test_whitelisted_model(self):
+ cols = (('domain_id', sql.String, 64),
+ ('group', sql.String, 255),
+ ('option', sql.String, 255),
+ ('value', sql.JsonBlob, None))
+ self.assertExpectedSchema('whitelisted_config', cols)
+
+ def test_sensitive_model(self):
+ cols = (('domain_id', sql.String, 64),
+ ('group', sql.String, 255),
+ ('option', sql.String, 255),
+ ('value', sql.JsonBlob, None))
+ self.assertExpectedSchema('sensitive_config', cols)
+
+
+class SqlDomainConfig(core_sql.BaseBackendSqlTests, core.DomainConfigTests):
+ def setUp(self):
+ super(SqlDomainConfig, self).setUp()
+ # core.DomainConfigTests is effectively a mixin class, so make sure we
+ # call its setup
+ core.DomainConfigTests.setUp(self)
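The explicit core.DomainConfigTests.setUp(self) call is needed because that mixin's setUp never invokes super(), so it cannot participate in the cooperative setUp chain. The same pattern in a standalone sketch:

    import unittest

    class Mixin(object):
        def setUp(self):                    # note: no super() call here
            self.mixin_ready = True

    class Concrete(unittest.TestCase, Mixin):
        def setUp(self):
            super(Concrete, self).setUp()   # run TestCase's chain
            Mixin.setUp(self)               # invoke the mixin explicitly

        def test_ready(self):
            self.assertTrue(self.mixin_ready)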
diff --git a/keystone-moon/keystone/tests/unit/backend/role/__init__.py b/keystone-moon/keystone/tests/unit/backend/role/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/role/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/role/core.py b/keystone-moon/keystone/tests/unit/backend/role/core.py
new file mode 100644
index 00000000..f6e47fe9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/role/core.py
@@ -0,0 +1,130 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+
+
+class RoleTests(object):
+
+ def test_get_role_404(self):
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ uuid.uuid4().hex)
+
+ def test_create_duplicate_role_name_fails(self):
+ role = {'id': 'fake1',
+ 'name': 'fake1name'}
+ self.role_api.create_role('fake1', role)
+ role['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.role_api.create_role,
+ 'fake2',
+ role)
+
+ def test_rename_duplicate_role_name_fails(self):
+ role1 = {
+ 'id': 'fake1',
+ 'name': 'fake1name'
+ }
+ role2 = {
+ 'id': 'fake2',
+ 'name': 'fake2name'
+ }
+ self.role_api.create_role('fake1', role1)
+ self.role_api.create_role('fake2', role2)
+ role1['name'] = 'fake2name'
+ self.assertRaises(exception.Conflict,
+ self.role_api.update_role,
+ 'fake1',
+ role1)
+
+ def test_role_crud(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_ref = self.role_api.get_role(role['id'])
+ role_ref_dict = {x: role_ref[x] for x in role_ref}
+ self.assertDictEqual(role_ref_dict, role)
+
+ role['name'] = uuid.uuid4().hex
+ updated_role_ref = self.role_api.update_role(role['id'], role)
+ role_ref = self.role_api.get_role(role['id'])
+ role_ref_dict = {x: role_ref[x] for x in role_ref}
+ self.assertDictEqual(role_ref_dict, role)
+ self.assertDictEqual(role_ref_dict, updated_role_ref)
+
+ self.role_api.delete_role(role['id'])
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role['id'])
+
+ def test_update_role_404(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.update_role,
+ role['id'],
+ role)
+
+ def test_list_roles(self):
+ roles = self.role_api.list_roles()
+ self.assertEqual(len(default_fixtures.ROLES), len(roles))
+ role_ids = set(role['id'] for role in roles)
+ expected_role_ids = set(role['id'] for role in default_fixtures.ROLES)
+ self.assertEqual(expected_role_ids, role_ids)
+
+ @tests.skip_if_cache_disabled('role')
+ def test_cache_layer_role_crud(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role_id = role['id']
+ # Create role
+ self.role_api.create_role(role_id, role)
+ role_ref = self.role_api.get_role(role_id)
+ updated_role_ref = copy.deepcopy(role_ref)
+ updated_role_ref['name'] = uuid.uuid4().hex
+ # Update role, bypassing the role api manager
+ self.role_api.driver.update_role(role_id, updated_role_ref)
+ # Verify get_role still returns old ref
+ self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
+ # Invalidate Cache
+ self.role_api.get_role.invalidate(self.role_api, role_id)
+ # Verify get_role returns the new role_ref
+ self.assertDictEqual(updated_role_ref,
+ self.role_api.get_role(role_id))
+ # Update role back to original via the role api manager
+ self.role_api.update_role(role_id, role_ref)
+ # Verify get_role returns the original role ref
+ self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
+ # Delete role bypassing the role api manager
+ self.role_api.driver.delete_role(role_id)
+ # Verify get_role still returns the role_ref
+ self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
+ # Invalidate cache
+ self.role_api.get_role.invalidate(self.role_api, role_id)
+ # Verify RoleNotFound is now raised
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role_id)
+ # recreate role
+ self.role_api.create_role(role_id, role)
+ self.role_api.get_role(role_id)
+ # delete role via the role api manager
+ self.role_api.delete_role(role_id)
+ # verify RoleNotFound is now raised
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role_id)
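test_cache_layer_role_crud relies on a memoized getter serving stale data after driver-level writes until its cache entry is dropped. A self-contained sketch of that behaviour (illustrative only, not the dogpile.cache machinery keystone actually uses):

    class CachedRoles(object):
        # Toy manager: reads are cached, so writes that bypass it go unseen.
        def __init__(self, driver):
            self._driver = driver
            self._cache = {}

        def get_role(self, role_id):
            if role_id not in self._cache:
                self._cache[role_id] = self._driver[role_id]
            return self._cache[role_id]

        def invalidate(self, role_id):
            self._cache.pop(role_id, None)

    driver = {'r1': {'name': 'old'}}
    api = CachedRoles(driver)
    assert api.get_role('r1')['name'] == 'old'
    driver['r1'] = {'name': 'new'}               # write bypassing the cache
    assert api.get_role('r1')['name'] == 'old'   # stale until invalidated
    api.invalidate('r1')
    assert api.get_role('r1')['name'] == 'new'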
diff --git a/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py b/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py
new file mode 100644
index 00000000..ba4b7c6e
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit.backend import core_ldap
+from keystone.tests.unit.backend.role import core as core_role
+from keystone.tests.unit import default_fixtures
+
+
+CONF = cfg.CONF
+
+
+class LdapRoleCommon(core_ldap.BaseBackendLdapCommon, core_role.RoleTests):
+ """Tests that should be run in every LDAP configuration.
+
+ Include additional tests that are unique to LDAP (or need to be overridden)
+ which should be run for all the various LDAP configurations we test.
+
+ """
+ pass
+
+
+class LdapRole(LdapRoleCommon, core_ldap.BaseBackendLdap, tests.TestCase):
+ """Test in an all-LDAP configuration.
+
+ Include additional tests that are unique to LDAP (or need to be overridden)
+ which only need to be run in a basic LDAP configuration.
+
+ """
+ def test_configurable_allowed_role_actions(self):
+ role = {'id': u'fäké1', 'name': u'fäké1'}
+ self.role_api.create_role(u'fäké1', role)
+ role_ref = self.role_api.get_role(u'fäké1')
+ self.assertEqual(u'fäké1', role_ref['id'])
+
+ role['name'] = u'fäké2'
+ self.role_api.update_role(u'fäké1', role)
+
+ self.role_api.delete_role(u'fäké1')
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ u'fäké1')
+
+ def test_configurable_forbidden_role_actions(self):
+ self.config_fixture.config(
+ group='ldap', role_allow_create=False, role_allow_update=False,
+ role_allow_delete=False)
+ self.load_backends()
+
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assertRaises(exception.ForbiddenAction,
+ self.role_api.create_role,
+ role['id'],
+ role)
+
+ self.role_member['name'] = uuid.uuid4().hex
+ self.assertRaises(exception.ForbiddenAction,
+ self.role_api.update_role,
+ self.role_member['id'],
+ self.role_member)
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.role_api.delete_role,
+ self.role_member['id'])
+
+ def test_role_filter(self):
+ role_ref = self.role_api.get_role(self.role_member['id'])
+ self.assertDictEqual(role_ref, self.role_member)
+
+ self.config_fixture.config(group='ldap',
+ role_filter='(CN=DOES_NOT_MATCH)')
+ self.load_backends()
+ # NOTE(morganfainberg): CONF.ldap.role_filter will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+ # could affect what the drivers return up to the manager, and it keeps
+ # that expectation valid when working with aggressive (on-create)
+ # cache population.
+ self.role_api.get_role.invalidate(self.role_api,
+ self.role_member['id'])
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ self.role_member['id'])
+
+ def test_role_attribute_mapping(self):
+ self.config_fixture.config(group='ldap', role_name_attribute='ou')
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+ # could affect what the drivers return up to the manager, and it keeps
+ # that expectation valid when working with aggressive (on-create)
+ # cache population.
+ self.role_api.get_role.invalidate(self.role_api,
+ self.role_member['id'])
+ role_ref = self.role_api.get_role(self.role_member['id'])
+ self.assertEqual(self.role_member['id'], role_ref['id'])
+ self.assertEqual(self.role_member['name'], role_ref['name'])
+
+ self.config_fixture.config(group='ldap', role_name_attribute='sn')
+ self.load_backends()
+ # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+ # could affect what the drivers return up to the manager, and it keeps
+ # that expectation valid when working with aggressive (on-create)
+ # cache population.
+ self.role_api.get_role.invalidate(self.role_api,
+ self.role_member['id'])
+ role_ref = self.role_api.get_role(self.role_member['id'])
+ self.assertEqual(self.role_member['id'], role_ref['id'])
+ self.assertNotIn('name', role_ref)
+
+ def test_role_attribute_ignore(self):
+ self.config_fixture.config(group='ldap',
+ role_attribute_ignore=['name'])
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ # NOTE(morganfainberg): CONF.ldap.role_attribute_ignore will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+ # could affect what the drivers return up to the manager, and it keeps
+ # that expectation valid when working with aggressive (on-create)
+ # cache population.
+ self.role_api.get_role.invalidate(self.role_api,
+ self.role_member['id'])
+ role_ref = self.role_api.get_role(self.role_member['id'])
+ self.assertEqual(self.role_member['id'], role_ref['id'])
+ self.assertNotIn('name', role_ref)
+
+
+class LdapIdentitySqlEverythingElseRole(
+ core_ldap.BaseBackendLdapIdentitySqlEverythingElse, LdapRoleCommon,
+ tests.TestCase):
+ """Test Identity in LDAP, Everything else in SQL."""
+ pass
+
+
+class LdapIdentitySqlEverythingElseWithMappingRole(
+ LdapIdentitySqlEverythingElseRole,
+ core_ldap.BaseBackendLdapIdentitySqlEverythingElseWithMapping):
+ """Test ID mapping of default LDAP backend."""
+ pass
diff --git a/keystone-moon/keystone/tests/unit/backend/role/test_sql.py b/keystone-moon/keystone/tests/unit/backend/role/test_sql.py
new file mode 100644
index 00000000..79ff148a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/role/test_sql.py
@@ -0,0 +1,40 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.common import sql
+from keystone import exception
+from keystone.tests.unit.backend import core_sql
+from keystone.tests.unit.backend.role import core
+
+
+class SqlRoleModels(core_sql.BaseBackendSqlModels):
+
+ def test_role_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 255))
+ self.assertExpectedSchema('role', cols)
+
+
+class SqlRole(core_sql.BaseBackendSqlTests, core.RoleTests):
+
+ def test_create_null_role_name(self):
+ role = {'id': uuid.uuid4().hex,
+ 'name': None}
+ self.assertRaises(exception.UnexpectedError,
+ self.role_api.create_role,
+ role['id'],
+ role)
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role['id'])
diff --git a/keystone-moon/keystone/tests/unit/catalog/__init__.py b/keystone-moon/keystone/tests/unit/catalog/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/catalog/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/catalog/test_core.py b/keystone-moon/keystone/tests/unit/catalog/test_core.py
new file mode 100644
index 00000000..99a34280
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/catalog/test_core.py
@@ -0,0 +1,74 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+import testtools
+
+from keystone.catalog import core
+from keystone import exception
+
+
+CONF = cfg.CONF
+
+
+class FormatUrlTests(testtools.TestCase):
+
+ def test_successful_formatting(self):
+ url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
+ '$(tenant_id)s/$(user_id)s')
+ values = {'public_bind_host': 'server', 'admin_port': 9090,
+ 'tenant_id': 'A', 'user_id': 'B'}
+ actual_url = core.format_url(url_template, values)
+
+ expected_url = 'http://server:9090/A/B'
+ self.assertEqual(expected_url, actual_url)
+
+ def test_raises_malformed_on_missing_key(self):
+ self.assertRaises(exception.MalformedEndpoint,
+ core.format_url,
+ "http://$(public_bind_host)s/$(public_port)d",
+ {"public_bind_host": "1"})
+
+ def test_raises_malformed_on_wrong_type(self):
+ self.assertRaises(exception.MalformedEndpoint,
+ core.format_url,
+ "http://$(public_bind_host)d",
+ {"public_bind_host": "something"})
+
+ def test_raises_malformed_on_incomplete_format(self):
+ self.assertRaises(exception.MalformedEndpoint,
+ core.format_url,
+ "http://$(public_bind_host)",
+ {"public_bind_host": "1"})
+
+ def test_formatting_a_non_string(self):
+ def _test(url_template):
+ self.assertRaises(exception.MalformedEndpoint,
+ core.format_url,
+ url_template,
+ {})
+
+ _test(None)
+ _test(object())
+
+ def test_substitution_with_key_not_allowed(self):
+ # If the url template contains a substitution that's not in the allowed
+ # list then MalformedEndpoint is raised.
+ # For example, admin_token isn't allowed.
+ url_template = ('http://$(public_bind_host)s:$(public_port)d/'
+ '$(tenant_id)s/$(user_id)s/$(admin_token)s')
+ values = {'public_bind_host': 'server', 'public_port': 9090,
+ 'tenant_id': 'A', 'user_id': 'B', 'admin_token': 'C'}
+ self.assertRaises(exception.MalformedEndpoint,
+ core.format_url,
+ url_template,
+ values)
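The $(name)s templates above are keystone's spelling of Python %-formatting, which explains the failure modes tested: a missing key, a mismatched type, an incomplete marker, or a key outside the allowed list all surface as MalformedEndpoint. A rough standalone approximation (a simplification; the real format_url also validates the substitution keys):

    template = 'http://$(public_bind_host)s:$(admin_port)d/$(tenant_id)s'
    values = {'public_bind_host': 'server', 'admin_port': 9090,
              'tenant_id': 'A'}
    url = template.replace('$(', '%(') % values
    assert url == 'http://server:9090/A'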
diff --git a/keystone-moon/keystone/tests/unit/common/__init__.py b/keystone-moon/keystone/tests/unit/common/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/common/test_base64utils.py b/keystone-moon/keystone/tests/unit/common/test_base64utils.py
new file mode 100644
index 00000000..b0b75578
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_base64utils.py
@@ -0,0 +1,208 @@
+# Copyright 2013 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import base64utils
+from keystone.tests import unit as tests
+
+base64_alphabet = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ 'abcdefghijklmnopqrstuvwxyz'
+ '0123456789'
+ '+/=') # includes pad char
+
+base64url_alphabet = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ 'abcdefghijklmnopqrstuvwxyz'
+ '0123456789'
+ '-_=') # includes pad char
+
+
+class TestValid(tests.BaseTestCase):
+ def test_valid_base64(self):
+ self.assertTrue(base64utils.is_valid_base64('+/=='))
+ self.assertTrue(base64utils.is_valid_base64('+/+='))
+ self.assertTrue(base64utils.is_valid_base64('+/+/'))
+
+ self.assertFalse(base64utils.is_valid_base64('-_=='))
+ self.assertFalse(base64utils.is_valid_base64('-_-='))
+ self.assertFalse(base64utils.is_valid_base64('-_-_'))
+
+ self.assertTrue(base64utils.is_valid_base64('abcd'))
+ self.assertFalse(base64utils.is_valid_base64('abcde'))
+ self.assertFalse(base64utils.is_valid_base64('abcde=='))
+ self.assertFalse(base64utils.is_valid_base64('abcdef'))
+ self.assertTrue(base64utils.is_valid_base64('abcdef=='))
+ self.assertFalse(base64utils.is_valid_base64('abcdefg'))
+ self.assertTrue(base64utils.is_valid_base64('abcdefg='))
+ self.assertTrue(base64utils.is_valid_base64('abcdefgh'))
+
+ self.assertFalse(base64utils.is_valid_base64('-_=='))
+
+ def test_valid_base64url(self):
+ self.assertFalse(base64utils.is_valid_base64url('+/=='))
+ self.assertFalse(base64utils.is_valid_base64url('+/+='))
+ self.assertFalse(base64utils.is_valid_base64url('+/+/'))
+
+ self.assertTrue(base64utils.is_valid_base64url('-_=='))
+ self.assertTrue(base64utils.is_valid_base64url('-_-='))
+ self.assertTrue(base64utils.is_valid_base64url('-_-_'))
+
+ self.assertTrue(base64utils.is_valid_base64url('abcd'))
+ self.assertFalse(base64utils.is_valid_base64url('abcde'))
+ self.assertFalse(base64utils.is_valid_base64url('abcde=='))
+ self.assertFalse(base64utils.is_valid_base64url('abcdef'))
+ self.assertTrue(base64utils.is_valid_base64url('abcdef=='))
+ self.assertFalse(base64utils.is_valid_base64url('abcdefg'))
+ self.assertTrue(base64utils.is_valid_base64url('abcdefg='))
+ self.assertTrue(base64utils.is_valid_base64url('abcdefgh'))
+
+ self.assertTrue(base64utils.is_valid_base64url('-_=='))
+
+
+class TestBase64Padding(tests.BaseTestCase):
+
+ def test_filter(self):
+ self.assertEqual('', base64utils.filter_formatting(''))
+ self.assertEqual('', base64utils.filter_formatting(' '))
+ self.assertEqual('a', base64utils.filter_formatting('a'))
+ self.assertEqual('a', base64utils.filter_formatting(' a'))
+ self.assertEqual('a', base64utils.filter_formatting('a '))
+ self.assertEqual('ab', base64utils.filter_formatting('ab'))
+ self.assertEqual('ab', base64utils.filter_formatting(' ab'))
+ self.assertEqual('ab', base64utils.filter_formatting('ab '))
+ self.assertEqual('ab', base64utils.filter_formatting('a b'))
+ self.assertEqual('ab', base64utils.filter_formatting(' a b'))
+ self.assertEqual('ab', base64utils.filter_formatting('a b '))
+ self.assertEqual('ab', base64utils.filter_formatting('a\nb\n '))
+
+ text = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ 'abcdefghijklmnopqrstuvwxyz'
+ '0123456789'
+ '+/=')
+ self.assertEqual(base64_alphabet,
+ base64utils.filter_formatting(text))
+
+ text = (' ABCDEFGHIJKLMNOPQRSTUVWXYZ\n'
+ ' abcdefghijklmnopqrstuvwxyz\n'
+ '\t\f\r'
+ ' 0123456789\n'
+ ' +/=')
+ self.assertEqual(base64_alphabet,
+ base64utils.filter_formatting(text))
+ self.assertEqual(base64url_alphabet,
+ base64utils.base64_to_base64url(base64_alphabet))
+
+ text = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ 'abcdefghijklmnopqrstuvwxyz'
+ '0123456789'
+ '-_=')
+ self.assertEqual(base64url_alphabet,
+ base64utils.filter_formatting(text))
+
+ text = (' ABCDEFGHIJKLMNOPQRSTUVWXYZ\n'
+ ' abcdefghijklmnopqrstuvwxyz\n'
+ '\t\f\r'
+ ' 0123456789\n'
+ '-_=')
+ self.assertEqual(base64url_alphabet,
+ base64utils.filter_formatting(text))
+
+ def test_alphabet_conversion(self):
+ self.assertEqual(base64url_alphabet,
+ base64utils.base64_to_base64url(base64_alphabet))
+
+ self.assertEqual(base64_alphabet,
+ base64utils.base64url_to_base64(base64url_alphabet))
+
+ def test_is_padded(self):
+ self.assertTrue(base64utils.base64_is_padded('ABCD'))
+ self.assertTrue(base64utils.base64_is_padded('ABC='))
+ self.assertTrue(base64utils.base64_is_padded('AB=='))
+
+ self.assertTrue(base64utils.base64_is_padded('1234ABCD'))
+ self.assertTrue(base64utils.base64_is_padded('1234ABC='))
+ self.assertTrue(base64utils.base64_is_padded('1234AB=='))
+
+ self.assertFalse(base64utils.base64_is_padded('ABC'))
+ self.assertFalse(base64utils.base64_is_padded('AB'))
+ self.assertFalse(base64utils.base64_is_padded('A'))
+ self.assertFalse(base64utils.base64_is_padded(''))
+
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64_is_padded, '=')
+
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64_is_padded, 'AB=C')
+
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64_is_padded, 'AB=')
+
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64_is_padded, 'ABCD=')
+
+ self.assertRaises(ValueError, base64utils.base64_is_padded,
+ 'ABC', pad='==')
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64_is_padded, 'A=BC')
+
+ def test_strip_padding(self):
+ self.assertEqual('ABCD', base64utils.base64_strip_padding('ABCD'))
+ self.assertEqual('ABC', base64utils.base64_strip_padding('ABC='))
+ self.assertEqual('AB', base64utils.base64_strip_padding('AB=='))
+ self.assertRaises(ValueError, base64utils.base64_strip_padding,
+ 'ABC=', pad='==')
+ self.assertEqual('ABC', base64utils.base64_strip_padding('ABC'))
+
+ def test_assure_padding(self):
+ self.assertEqual('ABCD', base64utils.base64_assure_padding('ABCD'))
+ self.assertEqual('ABC=', base64utils.base64_assure_padding('ABC'))
+ self.assertEqual('ABC=', base64utils.base64_assure_padding('ABC='))
+ self.assertEqual('AB==', base64utils.base64_assure_padding('AB'))
+ self.assertEqual('AB==', base64utils.base64_assure_padding('AB=='))
+ self.assertRaises(ValueError, base64utils.base64_assure_padding,
+ 'ABC', pad='==')
+
+ def test_base64_percent_encoding(self):
+ self.assertEqual('ABCD', base64utils.base64url_percent_encode('ABCD'))
+ self.assertEqual('ABC%3D',
+ base64utils.base64url_percent_encode('ABC='))
+ self.assertEqual('AB%3D%3D',
+ base64utils.base64url_percent_encode('AB=='))
+
+ self.assertEqual('ABCD', base64utils.base64url_percent_decode('ABCD'))
+ self.assertEqual('ABC=',
+ base64utils.base64url_percent_decode('ABC%3D'))
+ self.assertEqual('AB==',
+ base64utils.base64url_percent_decode('AB%3D%3D'))
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64url_percent_encode, 'chars')
+ self.assertRaises(base64utils.InvalidBase64Error,
+ base64utils.base64url_percent_decode, 'AB%3D%3')
+
+
+class TestTextWrap(tests.BaseTestCase):
+
+ def test_wrapping(self):
+ raw_text = 'abcdefgh'
+ wrapped_text = 'abc\ndef\ngh\n'
+
+ self.assertEqual(wrapped_text,
+ base64utils.base64_wrap(raw_text, width=3))
+
+ t = '\n'.join(base64utils.base64_wrap_iter(raw_text, width=3)) + '\n'
+ self.assertEqual(wrapped_text, t)
+
+ raw_text = 'abcdefgh'
+ wrapped_text = 'abcd\nefgh\n'
+
+ self.assertEqual(wrapped_text,
+ base64utils.base64_wrap(raw_text, width=4))
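The padding rules these tests encode reduce to: a valid base64 string's length is a multiple of 4, with '=' filling out the final group. A standalone sketch using the stdlib (mirroring, not reusing, the helpers above):

    import base64

    encoded = base64.b64encode(b'abcde').decode()   # 5 bytes -> 8 chars
    assert encoded == 'YWJjZGU=' and len(encoded) % 4 == 0

    # Strip and restore padding, as base64_strip_padding and
    # base64_assure_padding do for well-formed input:
    stripped = encoded.rstrip('=')
    restored = stripped + '=' * (-len(stripped) % 4)
    assert restored == encoded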
diff --git a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py b/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
new file mode 100644
index 00000000..74d0420c
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
@@ -0,0 +1,119 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+
+import mock
+from six.moves import queue
+import testtools
+from testtools import matchers
+
+from keystone.common.cache import _memcache_pool
+from keystone import exception
+from keystone.tests.unit import core
+
+
+class _TestConnectionPool(_memcache_pool.ConnectionPool):
+ destroyed_value = 'destroyed'
+
+ def _create_connection(self):
+ return mock.MagicMock()
+
+ def _destroy_connection(self, conn):
+ conn(self.destroyed_value)
+
+
+class TestConnectionPool(core.TestCase):
+ def setUp(self):
+ super(TestConnectionPool, self).setUp()
+ self.unused_timeout = 10
+ self.maxsize = 2
+ self.connection_pool = _TestConnectionPool(
+ maxsize=self.maxsize,
+ unused_timeout=self.unused_timeout)
+ self.addCleanup(self.cleanup_instance('connection_pool'))
+
+ def test_get_context_manager(self):
+ self.assertThat(self.connection_pool.queue, matchers.HasLength(0))
+ with self.connection_pool.acquire() as conn:
+ self.assertEqual(1, self.connection_pool._acquired)
+ self.assertEqual(0, self.connection_pool._acquired)
+ self.assertThat(self.connection_pool.queue, matchers.HasLength(1))
+ self.assertEqual(conn, self.connection_pool.queue[0].connection)
+
+ def test_cleanup_pool(self):
+ self.test_get_context_manager()
+ newtime = time.time() + self.unused_timeout * 2
+ non_expired_connection = _memcache_pool._PoolItem(
+ ttl=(newtime * 2),
+ connection=mock.MagicMock())
+ self.connection_pool.queue.append(non_expired_connection)
+ self.assertThat(self.connection_pool.queue, matchers.HasLength(2))
+ with mock.patch.object(time, 'time', return_value=newtime):
+ conn = self.connection_pool.queue[0].connection
+ with self.connection_pool.acquire():
+ pass
+ conn.assert_has_calls(
+ [mock.call(self.connection_pool.destroyed_value)])
+ self.assertThat(self.connection_pool.queue, matchers.HasLength(1))
+ self.assertEqual(0, non_expired_connection.connection.call_count)
+
+ def test_acquire_conn_exception_returns_acquired_count(self):
+ class TestException(Exception):
+ pass
+
+ with mock.patch.object(_TestConnectionPool, '_create_connection',
+ side_effect=TestException):
+ with testtools.ExpectedException(TestException):
+ with self.connection_pool.acquire():
+ pass
+ self.assertThat(self.connection_pool.queue,
+ matchers.HasLength(0))
+ self.assertEqual(0, self.connection_pool._acquired)
+
+ def test_connection_pool_limits_maximum_connections(self):
+ # NOTE(morganfainberg): To ensure we don't lock up the tests until
+ # the job time limit is hit, explicitly call .get_nowait() and
+ # .put_nowait() in this case.
+ conn1 = self.connection_pool.get_nowait()
+ conn2 = self.connection_pool.get_nowait()
+
+ # Use a nowait version to raise an Empty exception indicating we would
+ # not get another connection until one is placed back into the queue.
+ self.assertRaises(queue.Empty, self.connection_pool.get_nowait)
+
+ # Place the connections back into the pool.
+ self.connection_pool.put_nowait(conn1)
+ self.connection_pool.put_nowait(conn2)
+
+ # Make sure we can get a connection out of the pool again.
+ self.connection_pool.get_nowait()
+
+ def test_connection_pool_maximum_connection_get_timeout(self):
+ connection_pool = _TestConnectionPool(
+ maxsize=1,
+ unused_timeout=self.unused_timeout,
+ conn_get_timeout=0)
+
+ def _acquire_connection():
+ with connection_pool.acquire():
+ pass
+
+ # Make sure we've consumed the only available connection from the pool
+ conn = connection_pool.get_nowait()
+
+ self.assertRaises(exception.UnexpectedError, _acquire_connection)
+
+ # Put the connection back and ensure we can acquire the connection
+ # after it is available.
+ connection_pool.put_nowait(conn)
+ _acquire_connection()
diff --git a/keystone-moon/keystone/tests/unit/common/test_injection.py b/keystone-moon/keystone/tests/unit/common/test_injection.py
new file mode 100644
index 00000000..86bb3c24
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_injection.py
@@ -0,0 +1,293 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.common import dependency
+from keystone.tests import unit as tests
+
+
+class TestDependencyInjection(tests.BaseTestCase):
+ def setUp(self):
+ super(TestDependencyInjection, self).setUp()
+ self.addCleanup(dependency.reset)
+
+ def test_dependency_injection(self):
+ class Interface(object):
+ def do_work(self):
+ assert False
+
+ @dependency.provider('first_api')
+ class FirstImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.provider('second_api')
+ class SecondImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.requires('first_api', 'second_api')
+ class Consumer(object):
+ def do_work_with_dependencies(self):
+ assert self.first_api.do_work()
+ assert self.second_api.do_work()
+
+ # initialize dependency providers
+ first_api = FirstImplementation()
+ second_api = SecondImplementation()
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = Consumer()
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.first_api, first_api)
+ self.assertIs(consumer.second_api, second_api)
+ self.assertIsInstance(consumer.first_api, Interface)
+ self.assertIsInstance(consumer.second_api, Interface)
+ consumer.do_work_with_dependencies()
+
+ def test_dependency_provider_configuration(self):
+ @dependency.provider('api')
+ class Configurable(object):
+ def __init__(self, value=None):
+ self.value = value
+
+ def get_value(self):
+ return self.value
+
+ @dependency.requires('api')
+ class Consumer(object):
+ def get_value(self):
+ return self.api.get_value()
+
+ # initialize dependency providers
+ api = Configurable(value=True)
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = Consumer()
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.api, api)
+ self.assertIsInstance(consumer.api, Configurable)
+ self.assertTrue(consumer.get_value())
+
+ def test_dependency_consumer_configuration(self):
+ @dependency.provider('api')
+ class Provider(object):
+ def get_value(self):
+ return True
+
+ @dependency.requires('api')
+ class Configurable(object):
+ def __init__(self, value=None):
+ self.value = value
+
+ def get_value(self):
+ if self.value:
+ return self.api.get_value()
+
+ # initialize dependency providers
+ api = Provider()
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = Configurable(value=True)
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.api, api)
+ self.assertIsInstance(consumer.api, Provider)
+ self.assertTrue(consumer.get_value())
+
+ def test_inherited_dependency(self):
+ class Interface(object):
+ def do_work(self):
+ assert False
+
+ @dependency.provider('first_api')
+ class FirstImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.provider('second_api')
+ class SecondImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.requires('first_api')
+ class ParentConsumer(object):
+ def do_work_with_dependencies(self):
+ assert self.first_api.do_work()
+
+ @dependency.requires('second_api')
+ class ChildConsumer(ParentConsumer):
+ def do_work_with_dependencies(self):
+ assert self.second_api.do_work()
+ super(ChildConsumer, self).do_work_with_dependencies()
+
+ # initialize dependency providers
+ first_api = FirstImplementation()
+ second_api = SecondImplementation()
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = ChildConsumer()
+
+ # dependencies should be naturally inherited
+ self.assertEqual(
+ set(['first_api']),
+ ParentConsumer._dependencies)
+ self.assertEqual(
+ set(['first_api', 'second_api']),
+ ChildConsumer._dependencies)
+ self.assertEqual(
+ set(['first_api', 'second_api']),
+ consumer._dependencies)
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.first_api, first_api)
+ self.assertIs(consumer.second_api, second_api)
+ self.assertIsInstance(consumer.first_api, Interface)
+ self.assertIsInstance(consumer.second_api, Interface)
+ consumer.do_work_with_dependencies()
+
+ def test_unresolvable_dependency(self):
+ @dependency.requires(uuid.uuid4().hex)
+ class Consumer(object):
+ pass
+
+ def for_test():
+ Consumer()
+ dependency.resolve_future_dependencies()
+
+ self.assertRaises(dependency.UnresolvableDependencyException, for_test)
+
+ def test_circular_dependency(self):
+ p1_name = uuid.uuid4().hex
+ p2_name = uuid.uuid4().hex
+
+ @dependency.provider(p1_name)
+ @dependency.requires(p2_name)
+ class P1(object):
+ pass
+
+ @dependency.provider(p2_name)
+ @dependency.requires(p1_name)
+ class P2(object):
+ pass
+
+ p1 = P1()
+ p2 = P2()
+
+ dependency.resolve_future_dependencies()
+
+ self.assertIs(getattr(p1, p2_name), p2)
+ self.assertIs(getattr(p2, p1_name), p1)
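+        # The circular reference works because @dependency.requires only
+        # records attribute names at class-definition time; injection is
+        # deferred to resolve_future_dependencies(), so neither provider
+        # needs the other to exist when it is constructed.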
+
+ def test_reset(self):
+ # Can reset the registry of providers.
+
+ p_id = uuid.uuid4().hex
+
+ @dependency.provider(p_id)
+ class P(object):
+ pass
+
+ p_inst = P()
+
+ self.assertIs(dependency.get_provider(p_id), p_inst)
+
+ dependency.reset()
+
+ self.assertFalse(dependency._REGISTRY)
+
+ def test_optional_dependency_not_provided(self):
+ requirement_name = uuid.uuid4().hex
+
+ @dependency.optional(requirement_name)
+ class C1(object):
+ pass
+
+ c1_inst = C1()
+
+ dependency.resolve_future_dependencies()
+
+ self.assertIsNone(getattr(c1_inst, requirement_name))
+
+ def test_optional_dependency_provided(self):
+ requirement_name = uuid.uuid4().hex
+
+ @dependency.optional(requirement_name)
+ class C1(object):
+ pass
+
+ @dependency.provider(requirement_name)
+ class P1(object):
+ pass
+
+ c1_inst = C1()
+ p1_inst = P1()
+
+ dependency.resolve_future_dependencies()
+
+ self.assertIs(getattr(c1_inst, requirement_name), p1_inst)
+
+ def test_optional_and_required(self):
+ p1_name = uuid.uuid4().hex
+ p2_name = uuid.uuid4().hex
+ optional_name = uuid.uuid4().hex
+
+ @dependency.provider(p1_name)
+ @dependency.requires(p2_name)
+ @dependency.optional(optional_name)
+ class P1(object):
+ pass
+
+ @dependency.provider(p2_name)
+ @dependency.requires(p1_name)
+ class P2(object):
+ pass
+
+ p1 = P1()
+ p2 = P2()
+
+ dependency.resolve_future_dependencies()
+
+ self.assertIs(getattr(p1, p2_name), p2)
+ self.assertIs(getattr(p2, p1_name), p1)
+ self.assertIsNone(getattr(p1, optional_name))
+
+ def test_get_provider(self):
+ # Can get the instance of a provider using get_provider
+
+ provider_name = uuid.uuid4().hex
+
+ @dependency.provider(provider_name)
+ class P(object):
+ pass
+
+ provider_instance = P()
+ retrieved_provider_instance = dependency.get_provider(provider_name)
+ self.assertIs(provider_instance, retrieved_provider_instance)
+
+ def test_get_provider_not_provided_error(self):
+ # If no provider and provider is required then fails.
+
+ provider_name = uuid.uuid4().hex
+ self.assertRaises(KeyError, dependency.get_provider, provider_name)
+
+ def test_get_provider_not_provided_optional(self):
+ # If no provider and provider is optional then returns None.
+
+ provider_name = uuid.uuid4().hex
+ self.assertIsNone(dependency.get_provider(provider_name,
+ dependency.GET_OPTIONAL))
diff --git a/keystone-moon/keystone/tests/unit/common/test_json_home.py b/keystone-moon/keystone/tests/unit/common/test_json_home.py
new file mode 100644
index 00000000..fb7f8448
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_json_home.py
@@ -0,0 +1,91 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import copy
+
+from testtools import matchers
+
+from keystone.common import json_home
+from keystone.tests import unit as tests
+
+
+class JsonHomeTest(tests.BaseTestCase):
+ def test_build_v3_resource_relation(self):
+ resource_name = self.getUniqueString()
+ relation = json_home.build_v3_resource_relation(resource_name)
+ exp_relation = (
+ 'http://docs.openstack.org/api/openstack-identity/3/rel/%s' %
+ resource_name)
+ self.assertThat(relation, matchers.Equals(exp_relation))
+
+ def test_build_v3_extension_resource_relation(self):
+ extension_name = self.getUniqueString()
+ extension_version = self.getUniqueString()
+ resource_name = self.getUniqueString()
+ relation = json_home.build_v3_extension_resource_relation(
+ extension_name, extension_version, resource_name)
+ exp_relation = (
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel/'
+ '%s' % (extension_name, extension_version, resource_name))
+ self.assertThat(relation, matchers.Equals(exp_relation))
+
+ def test_build_v3_parameter_relation(self):
+ parameter_name = self.getUniqueString()
+ relation = json_home.build_v3_parameter_relation(parameter_name)
+ exp_relation = (
+ 'http://docs.openstack.org/api/openstack-identity/3/param/%s' %
+ parameter_name)
+ self.assertThat(relation, matchers.Equals(exp_relation))
+
+ def test_build_v3_extension_parameter_relation(self):
+ extension_name = self.getUniqueString()
+ extension_version = self.getUniqueString()
+ parameter_name = self.getUniqueString()
+ relation = json_home.build_v3_extension_parameter_relation(
+ extension_name, extension_version, parameter_name)
+ exp_relation = (
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/'
+ 'param/%s' % (extension_name, extension_version, parameter_name))
+ self.assertThat(relation, matchers.Equals(exp_relation))
+
+ def test_translate_urls(self):
+ href_rel = self.getUniqueString()
+ href = self.getUniqueString()
+ href_template_rel = self.getUniqueString()
+ href_template = self.getUniqueString()
+ href_vars = {self.getUniqueString(): self.getUniqueString()}
+ original_json_home = {
+ 'resources': {
+ href_rel: {'href': href},
+ href_template_rel: {
+ 'href-template': href_template,
+ 'href-vars': href_vars}
+ }
+ }
+
+ new_json_home = copy.deepcopy(original_json_home)
+ new_prefix = self.getUniqueString()
+ json_home.translate_urls(new_json_home, new_prefix)
+
+ exp_json_home = {
+ 'resources': {
+ href_rel: {'href': new_prefix + href},
+ href_template_rel: {
+ 'href-template': new_prefix + href_template,
+ 'href-vars': href_vars}
+ }
+ }
+
+ self.assertThat(new_json_home, matchers.Equals(exp_json_home))
diff --git a/keystone-moon/keystone/tests/unit/common/test_ldap.py b/keystone-moon/keystone/tests/unit/common/test_ldap.py
new file mode 100644
index 00000000..41568890
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_ldap.py
@@ -0,0 +1,502 @@
+# -*- coding: utf-8 -*-
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import shutil
+import tempfile
+import uuid
+
+import ldap.dn
+import mock
+from oslo_config import cfg
+from testtools import matchers
+
+from keystone.common import ldap as ks_ldap
+from keystone.common.ldap import core as common_ldap_core
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import fakeldap
+
+CONF = cfg.CONF
+
+
+class DnCompareTest(tests.BaseTestCase):
+ """Tests for the DN comparison functions in keystone.common.ldap.core."""
+
+ def test_prep(self):
+        # prep_case_insensitive returns the string unchanged if it is already
+        # lowercase and contains no insignificant spaces.
+ value = 'lowercase value'
+ self.assertEqual(value, ks_ldap.prep_case_insensitive(value))
+
+ def test_prep_lowercase(self):
+        # prep_case_insensitive lowercases the value.
+ value = 'UPPERCASE VALUE'
+ exp_value = value.lower()
+ self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value))
+
+ def test_prep_insignificant(self):
+        # prep_case_insensitive collapses repeated (insignificant) spaces.
+        value = 'before   after'
+        exp_value = 'before after'
+ self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value))
+
+ def test_prep_insignificant_pre_post(self):
+        # prep_case_insensitive strips leading and trailing spaces.
+ value = ' value '
+ exp_value = 'value'
+ self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value))
+
+ def test_ava_equal_same(self):
+ # is_ava_value_equal returns True if the two values are the same.
+ value = 'val1'
+ self.assertTrue(ks_ldap.is_ava_value_equal('cn', value, value))
+
+ def test_ava_equal_complex(self):
+        # is_ava_value_equal returns True if the two values are the same even
+        # when one has different capitalization and insignificant spaces.
+        val1 = 'before after'
+        val2 = '  BEFORE  afTer '
+ self.assertTrue(ks_ldap.is_ava_value_equal('cn', val1, val2))
+
+ def test_ava_different(self):
+ # is_ava_value_equal returns False if the values aren't the same.
+ self.assertFalse(ks_ldap.is_ava_value_equal('cn', 'val1', 'val2'))
+
+ def test_rdn_same(self):
+ # is_rdn_equal returns True if the two values are the same.
+ rdn = ldap.dn.str2dn('cn=val1')[0]
+ self.assertTrue(ks_ldap.is_rdn_equal(rdn, rdn))
+
+ def test_rdn_diff_length(self):
+ # is_rdn_equal returns False if the RDNs have a different number of
+ # AVAs.
+ rdn1 = ldap.dn.str2dn('cn=cn1')[0]
+ rdn2 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
+ self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2))
+
+ def test_rdn_multi_ava_same_order(self):
+ # is_rdn_equal returns True if the RDNs have the same number of AVAs
+ # and the values are the same.
+ rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
+ rdn2 = ldap.dn.str2dn('cn=CN1+ou=OU1')[0]
+ self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2))
+
+ def test_rdn_multi_ava_diff_order(self):
+ # is_rdn_equal returns True if the RDNs have the same number of AVAs
+ # and the values are the same, even if in a different order
+ rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
+ rdn2 = ldap.dn.str2dn('ou=OU1+cn=CN1')[0]
+ self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2))
+
+ def test_rdn_multi_ava_diff_type(self):
+ # is_rdn_equal returns False if the RDNs have the same number of AVAs
+ # and the attribute types are different.
+ rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
+ rdn2 = ldap.dn.str2dn('cn=cn1+sn=sn1')[0]
+ self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2))
+
+ def test_rdn_attr_type_case_diff(self):
+ # is_rdn_equal returns True for same RDNs even when attr type case is
+ # different.
+ rdn1 = ldap.dn.str2dn('cn=cn1')[0]
+ rdn2 = ldap.dn.str2dn('CN=cn1')[0]
+ self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2))
+
+ def test_rdn_attr_type_alias(self):
+ # is_rdn_equal returns False for same RDNs even when attr type alias is
+ # used. Note that this is a limitation since an LDAP server should
+ # consider them equal.
+ rdn1 = ldap.dn.str2dn('cn=cn1')[0]
+ rdn2 = ldap.dn.str2dn('2.5.4.3=cn1')[0]
+ self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2))
+
+ def test_dn_same(self):
+ # is_dn_equal returns True if the DNs are the same.
+ dn = 'cn=Babs Jansen,ou=OpenStack'
+ self.assertTrue(ks_ldap.is_dn_equal(dn, dn))
+
+ def test_dn_equal_unicode(self):
+ # is_dn_equal can accept unicode
+ dn = u'cn=fäké,ou=OpenStack'
+ self.assertTrue(ks_ldap.is_dn_equal(dn, dn))
+
+ def test_dn_diff_length(self):
+ # is_dn_equal returns False if the DNs don't have the same number of
+ # RDNs
+ dn1 = 'cn=Babs Jansen,ou=OpenStack'
+ dn2 = 'cn=Babs Jansen,ou=OpenStack,dc=example.com'
+ self.assertFalse(ks_ldap.is_dn_equal(dn1, dn2))
+
+ def test_dn_equal_rdns(self):
+ # is_dn_equal returns True if the DNs have the same number of RDNs
+ # and each RDN is the same.
+ dn1 = 'cn=Babs Jansen,ou=OpenStack+cn=OpenSource'
+ dn2 = 'CN=Babs Jansen,cn=OpenSource+ou=OpenStack'
+ self.assertTrue(ks_ldap.is_dn_equal(dn1, dn2))
+
+ def test_dn_parsed_dns(self):
+ # is_dn_equal can also accept parsed DNs.
+ dn_str1 = ldap.dn.str2dn('cn=Babs Jansen,ou=OpenStack+cn=OpenSource')
+ dn_str2 = ldap.dn.str2dn('CN=Babs Jansen,cn=OpenSource+ou=OpenStack')
+ self.assertTrue(ks_ldap.is_dn_equal(dn_str1, dn_str2))
+
+ def test_startswith_under_child(self):
+ # dn_startswith returns True if descendant_dn is a child of dn.
+ child = 'cn=Babs Jansen,ou=OpenStack'
+ parent = 'ou=OpenStack'
+ self.assertTrue(ks_ldap.dn_startswith(child, parent))
+
+ def test_startswith_parent(self):
+ # dn_startswith returns False if descendant_dn is a parent of dn.
+ child = 'cn=Babs Jansen,ou=OpenStack'
+ parent = 'ou=OpenStack'
+ self.assertFalse(ks_ldap.dn_startswith(parent, child))
+
+ def test_startswith_same(self):
+ # dn_startswith returns False if DNs are the same.
+ dn = 'cn=Babs Jansen,ou=OpenStack'
+ self.assertFalse(ks_ldap.dn_startswith(dn, dn))
+
+ def test_startswith_not_parent(self):
+ # dn_startswith returns False if descendant_dn is not under the dn
+ child = 'cn=Babs Jansen,ou=OpenStack'
+ parent = 'dc=example.com'
+ self.assertFalse(ks_ldap.dn_startswith(child, parent))
+
+ def test_startswith_descendant(self):
+ # dn_startswith returns True if descendant_dn is a descendant of dn.
+ descendant = 'cn=Babs Jansen,ou=Keystone,ou=OpenStack,dc=example.com'
+ dn = 'ou=OpenStack,dc=example.com'
+ self.assertTrue(ks_ldap.dn_startswith(descendant, dn))
+
+ descendant = 'uid=12345,ou=Users,dc=example,dc=com'
+ dn = 'ou=Users,dc=example,dc=com'
+ self.assertTrue(ks_ldap.dn_startswith(descendant, dn))
+
+ def test_startswith_parsed_dns(self):
+ # dn_startswith also accepts parsed DNs.
+ descendant = ldap.dn.str2dn('cn=Babs Jansen,ou=OpenStack')
+ dn = ldap.dn.str2dn('ou=OpenStack')
+ self.assertTrue(ks_ldap.dn_startswith(descendant, dn))
+
+ def test_startswith_unicode(self):
+ # dn_startswith accepts unicode.
+ child = u'cn=cn=fäké,ou=OpenStäck'
+ parent = 'ou=OpenStäck'
+ self.assertTrue(ks_ldap.dn_startswith(child, parent))
+
+
+class LDAPDeleteTreeTest(tests.TestCase):
+
+ def setUp(self):
+ super(LDAPDeleteTreeTest, self).setUp()
+
+ ks_ldap.register_handler('fake://',
+ fakeldap.FakeLdapNoSubtreeDelete)
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.addCleanup(self.clear_database)
+ self.addCleanup(common_ldap_core._HANDLERS.clear)
+
+ def clear_database(self):
+ for shelf in fakeldap.FakeShelves:
+ fakeldap.FakeShelves[shelf].clear()
+
+ def config_overrides(self):
+ super(LDAPDeleteTreeTest, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def config_files(self):
+ config_files = super(LDAPDeleteTreeTest, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ def test_deleteTree(self):
+ """Test manually deleting a tree.
+
+ Few LDAP servers support CONTROL_DELETETREE. This test
+ exercises the alternate code paths in BaseLdap.deleteTree.
+
+ """
+ conn = self.identity_api.user.get_connection()
+ id_attr = self.identity_api.user.id_attr
+ objclass = self.identity_api.user.object_class.lower()
+ tree_dn = self.identity_api.user.tree_dn
+
+ def create_entry(name, parent_dn=None):
+ if not parent_dn:
+ parent_dn = tree_dn
+ dn = '%s=%s,%s' % (id_attr, name, parent_dn)
+ attrs = [('objectclass', [objclass, 'ldapsubentry']),
+ (id_attr, [name])]
+ conn.add_s(dn, attrs)
+ return dn
+
+ # create 3 entries like this:
+ # cn=base
+ # cn=child,cn=base
+ # cn=grandchild,cn=child,cn=base
+ # then attempt to deleteTree(cn=base)
+ base_id = 'base'
+ base_dn = create_entry(base_id)
+ child_dn = create_entry('child', base_dn)
+ grandchild_dn = create_entry('grandchild', child_dn)
+
+ # verify that the three entries were created
+ scope = ldap.SCOPE_SUBTREE
+ filt = '(|(objectclass=*)(objectclass=ldapsubentry))'
+ entries = conn.search_s(base_dn, scope, filt,
+ attrlist=common_ldap_core.DN_ONLY)
+ self.assertThat(entries, matchers.HasLength(3))
+ sort_ents = sorted([e[0] for e in entries], key=len, reverse=True)
+ self.assertEqual([grandchild_dn, child_dn, base_dn], sort_ents)
+
+ # verify that a non-leaf node can't be deleted directly by the
+ # LDAP server
+ self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF,
+ conn.delete_s, base_dn)
+ self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF,
+ conn.delete_s, child_dn)
+
+ # call our deleteTree implementation
+ self.identity_api.user.deleteTree(base_id)
+ self.assertRaises(ldap.NO_SUCH_OBJECT,
+ conn.search_s, base_dn, ldap.SCOPE_BASE)
+ self.assertRaises(ldap.NO_SUCH_OBJECT,
+ conn.search_s, child_dn, ldap.SCOPE_BASE)
+ self.assertRaises(ldap.NO_SUCH_OBJECT,
+ conn.search_s, grandchild_dn, ldap.SCOPE_BASE)
+
+
+class SslTlsTest(tests.TestCase):
+ """Tests for the SSL/TLS functionality in keystone.common.ldap.core."""
+
+ @mock.patch.object(ks_ldap.core.KeystoneLDAPHandler, 'simple_bind_s')
+ @mock.patch.object(ldap.ldapobject.LDAPObject, 'start_tls_s')
+ def _init_ldap_connection(self, config, mock_ldap_one, mock_ldap_two):
+ # Attempt to connect to initialize python-ldap.
+ base_ldap = ks_ldap.BaseLdap(config)
+ base_ldap.get_connection()
+
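+    # NOTE: the two mocks above keep these tests offline -- simple_bind_s and
+    # start_tls_s are stubbed out, so get_connection() only exercises the TLS
+    # option handling that each test then inspects via ldap.get_option().
+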
+ def test_certfile_trust_tls(self):
+ # We need this to actually exist, so we create a tempfile.
+ (handle, certfile) = tempfile.mkstemp()
+ self.addCleanup(os.unlink, certfile)
+ self.addCleanup(os.close, handle)
+ self.config_fixture.config(group='ldap',
+ url='ldap://localhost',
+ use_tls=True,
+ tls_cacertfile=certfile)
+
+ self._init_ldap_connection(CONF)
+
+ # Ensure the cert trust option is set.
+ self.assertEqual(certfile, ldap.get_option(ldap.OPT_X_TLS_CACERTFILE))
+
+ def test_certdir_trust_tls(self):
+ # We need this to actually exist, so we create a tempdir.
+ certdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, certdir)
+ self.config_fixture.config(group='ldap',
+ url='ldap://localhost',
+ use_tls=True,
+ tls_cacertdir=certdir)
+
+ self._init_ldap_connection(CONF)
+
+ # Ensure the cert trust option is set.
+ self.assertEqual(certdir, ldap.get_option(ldap.OPT_X_TLS_CACERTDIR))
+
+ def test_certfile_trust_ldaps(self):
+ # We need this to actually exist, so we create a tempfile.
+ (handle, certfile) = tempfile.mkstemp()
+ self.addCleanup(os.unlink, certfile)
+ self.addCleanup(os.close, handle)
+ self.config_fixture.config(group='ldap',
+ url='ldaps://localhost',
+ use_tls=False,
+ tls_cacertfile=certfile)
+
+ self._init_ldap_connection(CONF)
+
+ # Ensure the cert trust option is set.
+ self.assertEqual(certfile, ldap.get_option(ldap.OPT_X_TLS_CACERTFILE))
+
+ def test_certdir_trust_ldaps(self):
+ # We need this to actually exist, so we create a tempdir.
+ certdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, certdir)
+ self.config_fixture.config(group='ldap',
+ url='ldaps://localhost',
+ use_tls=False,
+ tls_cacertdir=certdir)
+
+ self._init_ldap_connection(CONF)
+
+ # Ensure the cert trust option is set.
+ self.assertEqual(certdir, ldap.get_option(ldap.OPT_X_TLS_CACERTDIR))
+
+
+class LDAPPagedResultsTest(tests.TestCase):
+ """Tests the paged results functionality in keystone.common.ldap.core."""
+
+ def setUp(self):
+ super(LDAPPagedResultsTest, self).setUp()
+ self.clear_database()
+
+ ks_ldap.register_handler('fake://', fakeldap.FakeLdap)
+ self.addCleanup(common_ldap_core._HANDLERS.clear)
+
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ def clear_database(self):
+ for shelf in fakeldap.FakeShelves:
+ fakeldap.FakeShelves[shelf].clear()
+
+ def config_overrides(self):
+ super(LDAPPagedResultsTest, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def config_files(self):
+ config_files = super(LDAPPagedResultsTest, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ @mock.patch.object(fakeldap.FakeLdap, 'search_ext')
+ @mock.patch.object(fakeldap.FakeLdap, 'result3')
+ def test_paged_results_control_api(self, mock_result3, mock_search_ext):
+ mock_result3.return_value = ('', [], 1, [])
+
+ self.config_fixture.config(group='ldap',
+ page_size=1)
+
+ conn = self.identity_api.user.get_connection()
+ conn._paged_search_s('dc=example,dc=test',
+ ldap.SCOPE_SUBTREE,
+ 'objectclass=*')
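+
+        # _paged_search_s drives the paging loop through search_ext and
+        # result3; the mocked result3 returns no page control, so the loop
+        # ends after a single page without needing a real server.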
+
+
+class CommonLdapTestCase(tests.BaseTestCase):
+ """These test cases call functions in keystone.common.ldap."""
+
+ def test_binary_attribute_values(self):
+ result = [(
+ 'cn=junk,dc=example,dc=com',
+ {
+ 'cn': ['junk'],
+ 'sn': [uuid.uuid4().hex],
+ 'mail': [uuid.uuid4().hex],
+ 'binary_attr': ['\x00\xFF\x00\xFF']
+ }
+ ), ]
+ py_result = ks_ldap.convert_ldap_result(result)
+ # The attribute containing the binary value should
+ # not be present in the converted result.
+ self.assertNotIn('binary_attr', py_result[0][1])
+
+ def test_utf8_conversion(self):
+ value_unicode = u'fäké1'
+ value_utf8 = value_unicode.encode('utf-8')
+
+ result_utf8 = ks_ldap.utf8_encode(value_unicode)
+ self.assertEqual(value_utf8, result_utf8)
+
+ result_utf8 = ks_ldap.utf8_encode(value_utf8)
+ self.assertEqual(value_utf8, result_utf8)
+
+ result_unicode = ks_ldap.utf8_decode(value_utf8)
+ self.assertEqual(value_unicode, result_unicode)
+
+ result_unicode = ks_ldap.utf8_decode(value_unicode)
+ self.assertEqual(value_unicode, result_unicode)
+
+ self.assertRaises(TypeError,
+ ks_ldap.utf8_encode,
+ 100)
+
+ result_unicode = ks_ldap.utf8_decode(100)
+ self.assertEqual(u'100', result_unicode)
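+        # NOTE: the calls above exercise an asymmetry -- utf8_encode rejects
+        # non-string input with TypeError, while utf8_decode stringifies it
+        # first (100 -> u'100').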
+
+ def test_user_id_begins_with_0(self):
+ user_id = '0123456'
+ result = [(
+ 'cn=dummy,dc=example,dc=com',
+ {
+ 'user_id': [user_id],
+ 'enabled': ['TRUE']
+ }
+ ), ]
+ py_result = ks_ldap.convert_ldap_result(result)
+ # The user id should be 0123456, and the enabled
+ # flag should be True
+ self.assertIs(py_result[0][1]['enabled'][0], True)
+ self.assertEqual(user_id, py_result[0][1]['user_id'][0])
+
+ def test_user_id_begins_with_0_and_enabled_bit_mask(self):
+ user_id = '0123456'
+ bitmask = '225'
+ expected_bitmask = 225
+ result = [(
+ 'cn=dummy,dc=example,dc=com',
+ {
+ 'user_id': [user_id],
+ 'enabled': [bitmask]
+ }
+ ), ]
+ py_result = ks_ldap.convert_ldap_result(result)
+ # The user id should be 0123456, and the enabled
+ # flag should be 225
+ self.assertEqual(expected_bitmask, py_result[0][1]['enabled'][0])
+ self.assertEqual(user_id, py_result[0][1]['user_id'][0])
+
+ def test_user_id_and_bitmask_begins_with_0(self):
+ user_id = '0123456'
+ bitmask = '0225'
+ expected_bitmask = 225
+ result = [(
+ 'cn=dummy,dc=example,dc=com',
+ {
+ 'user_id': [user_id],
+ 'enabled': [bitmask]
+ }
+ ), ]
+ py_result = ks_ldap.convert_ldap_result(result)
+ # The user id should be 0123456, and the enabled
+ # flag should be 225, the 0 is dropped.
+ self.assertEqual(expected_bitmask, py_result[0][1]['enabled'][0])
+ self.assertEqual(user_id, py_result[0][1]['user_id'][0])
+
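+    # NOTE: a rough sketch of the value coercion the three tests above rely
+    # on; `_coerce` is a hypothetical helper, not keystone's actual
+    # implementation, and id-like attributes are assumed to be exempt:
+    #
+    #     def _coerce(value):
+    #         if value.upper() in ('TRUE', 'FALSE'):
+    #             return value.upper() == 'TRUE'
+    #         try:
+    #             return int(value)   # '0225' -> 225, leading zero dropped
+    #         except ValueError:
+    #             return value
+    #
+    #     # 'user_id' is exempt, which is why '0123456' survives intact.
+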
+ def test_user_id_and_user_name_with_boolean_string(self):
+ boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
+                           'TrUe', 'FaLse']
+ for user_name in boolean_strings:
+ user_id = uuid.uuid4().hex
+ result = [(
+ 'cn=dummy,dc=example,dc=com',
+ {
+ 'user_id': [user_id],
+ 'user_name': [user_name]
+ }
+ ), ]
+ py_result = ks_ldap.convert_ldap_result(result)
+ # The user name should still be a string value.
+ self.assertEqual(user_name, py_result[0][1]['user_name'][0])
diff --git a/keystone-moon/keystone/tests/unit/common/test_notifications.py b/keystone-moon/keystone/tests/unit/common/test_notifications.py
new file mode 100644
index 00000000..55dd556d
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_notifications.py
@@ -0,0 +1,974 @@
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import uuid
+
+import mock
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslotest import mockpatch
+from pycadf import cadftaxonomy
+from pycadf import cadftype
+from pycadf import eventfactory
+from pycadf import resource as cadfresource
+import testtools
+
+from keystone.common import dependency
+from keystone import notifications
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+EXP_RESOURCE_TYPE = uuid.uuid4().hex
+CREATED_OPERATION = notifications.ACTIONS.created
+UPDATED_OPERATION = notifications.ACTIONS.updated
+DELETED_OPERATION = notifications.ACTIONS.deleted
+DISABLED_OPERATION = notifications.ACTIONS.disabled
+
+
+class ArbitraryException(Exception):
+ pass
+
+
+def register_callback(operation, resource_type=EXP_RESOURCE_TYPE):
+    """Helper for creating and registering a mock callback."""
+ callback = mock.Mock(__name__='callback',
+ im_class=mock.Mock(__name__='class'))
+ notifications.register_event_callback(operation, resource_type, callback)
+ return callback
+
+
+class AuditNotificationsTestCase(testtools.TestCase):
+ def setUp(self):
+ super(AuditNotificationsTestCase, self).setUp()
+ self.config_fixture = self.useFixture(config_fixture.Config(CONF))
+ self.addCleanup(notifications.clear_subscribers)
+
+ def _test_notification_operation(self, notify_function, operation):
+ exp_resource_id = uuid.uuid4().hex
+ callback = register_callback(operation)
+ notify_function(EXP_RESOURCE_TYPE, exp_resource_id)
+ callback.assert_called_once_with('identity', EXP_RESOURCE_TYPE,
+ operation,
+ {'resource_info': exp_resource_id})
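+
+        # Re-run with the CADF notification format; the same operations
+        # should now route through _create_cadf_payload.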
+ self.config_fixture.config(notification_format='cadf')
+ with mock.patch(
+ 'keystone.notifications._create_cadf_payload') as cadf_notify:
+ notify_function(EXP_RESOURCE_TYPE, exp_resource_id)
+ initiator = None
+ cadf_notify.assert_called_once_with(
+ operation, EXP_RESOURCE_TYPE, exp_resource_id,
+ notifications.taxonomy.OUTCOME_SUCCESS, initiator)
+ notify_function(EXP_RESOURCE_TYPE, exp_resource_id, public=False)
+ cadf_notify.assert_called_once_with(
+ operation, EXP_RESOURCE_TYPE, exp_resource_id,
+ notifications.taxonomy.OUTCOME_SUCCESS, initiator)
+
+ def test_resource_created_notification(self):
+ self._test_notification_operation(notifications.Audit.created,
+ CREATED_OPERATION)
+
+ def test_resource_updated_notification(self):
+ self._test_notification_operation(notifications.Audit.updated,
+ UPDATED_OPERATION)
+
+ def test_resource_deleted_notification(self):
+ self._test_notification_operation(notifications.Audit.deleted,
+ DELETED_OPERATION)
+
+ def test_resource_disabled_notification(self):
+ self._test_notification_operation(notifications.Audit.disabled,
+ DISABLED_OPERATION)
+
+
+class NotificationsWrapperTestCase(testtools.TestCase):
+ def create_fake_ref(self):
+ resource_id = uuid.uuid4().hex
+ return resource_id, {
+ 'id': resource_id,
+ 'key': uuid.uuid4().hex
+ }
+
+ @notifications.created(EXP_RESOURCE_TYPE)
+ def create_resource(self, resource_id, data):
+ return data
+
+ def test_resource_created_notification(self):
+ exp_resource_id, data = self.create_fake_ref()
+ callback = register_callback(CREATED_OPERATION)
+
+ self.create_resource(exp_resource_id, data)
+ callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
+ CREATED_OPERATION,
+ {'resource_info': exp_resource_id})
+
+ @notifications.updated(EXP_RESOURCE_TYPE)
+ def update_resource(self, resource_id, data):
+ return data
+
+ def test_resource_updated_notification(self):
+ exp_resource_id, data = self.create_fake_ref()
+ callback = register_callback(UPDATED_OPERATION)
+
+ self.update_resource(exp_resource_id, data)
+ callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
+ UPDATED_OPERATION,
+ {'resource_info': exp_resource_id})
+
+ @notifications.deleted(EXP_RESOURCE_TYPE)
+ def delete_resource(self, resource_id):
+ pass
+
+ def test_resource_deleted_notification(self):
+ exp_resource_id = uuid.uuid4().hex
+ callback = register_callback(DELETED_OPERATION)
+
+ self.delete_resource(exp_resource_id)
+ callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
+ DELETED_OPERATION,
+ {'resource_info': exp_resource_id})
+
+ @notifications.created(EXP_RESOURCE_TYPE)
+ def create_exception(self, resource_id):
+ raise ArbitraryException()
+
+ def test_create_exception_without_notification(self):
+ callback = register_callback(CREATED_OPERATION)
+ self.assertRaises(
+ ArbitraryException, self.create_exception, uuid.uuid4().hex)
+ self.assertFalse(callback.called)
+
+ @notifications.created(EXP_RESOURCE_TYPE)
+ def update_exception(self, resource_id):
+ raise ArbitraryException()
+
+ def test_update_exception_without_notification(self):
+ callback = register_callback(UPDATED_OPERATION)
+ self.assertRaises(
+ ArbitraryException, self.update_exception, uuid.uuid4().hex)
+ self.assertFalse(callback.called)
+
+ @notifications.deleted(EXP_RESOURCE_TYPE)
+ def delete_exception(self, resource_id):
+ raise ArbitraryException()
+
+ def test_delete_exception_without_notification(self):
+ callback = register_callback(DELETED_OPERATION)
+ self.assertRaises(
+ ArbitraryException, self.delete_exception, uuid.uuid4().hex)
+ self.assertFalse(callback.called)
+
+
+class NotificationsTestCase(testtools.TestCase):
+ def setUp(self):
+ super(NotificationsTestCase, self).setUp()
+
+ # these should use self.config_fixture.config(), but they haven't
+ # been registered yet
+ CONF.rpc_backend = 'fake'
+ CONF.notification_driver = ['fake']
+
+ def test_send_notification(self):
+        """Test the private method _send_notification.
+
+        Ensure event_type, payload, and context are built and passed
+        properly.
+        """
+ resource = uuid.uuid4().hex
+ resource_type = EXP_RESOURCE_TYPE
+ operation = CREATED_OPERATION
+
+ # NOTE(ldbragst): Even though notifications._send_notification doesn't
+ # contain logic that creates cases, this is supposed to test that
+ # context is always empty and that we ensure the resource ID of the
+ # resource in the notification is contained in the payload. It was
+ # agreed that context should be empty in Keystone's case, which is
+ # also noted in the /keystone/notifications.py module. This test
+ # ensures and maintains these conditions.
+ expected_args = [
+ {}, # empty context
+ 'identity.%s.created' % resource_type, # event_type
+ {'resource_info': resource}, # payload
+ 'INFO', # priority is always INFO...
+ ]
+
+ with mock.patch.object(notifications._get_notifier(),
+ '_notify') as mocked:
+ notifications._send_notification(operation, resource_type,
+ resource)
+ mocked.assert_called_once_with(*expected_args)
+
+
+class BaseNotificationTest(test_v3.RestfulTestCase):
+
+ def setUp(self):
+ super(BaseNotificationTest, self).setUp()
+
+ self._notifications = []
+ self._audits = []
+
+ def fake_notify(operation, resource_type, resource_id,
+ public=True):
+ note = {
+ 'resource_id': resource_id,
+ 'operation': operation,
+ 'resource_type': resource_type,
+ 'send_notification_called': True,
+ 'public': public}
+ self._notifications.append(note)
+
+ self.useFixture(mockpatch.PatchObject(
+ notifications, '_send_notification', fake_notify))
+
+ def fake_audit(action, initiator, outcome, target,
+ event_type, **kwargs):
+ service_security = cadftaxonomy.SERVICE_SECURITY
+
+ event = eventfactory.EventFactory().new_event(
+ eventType=cadftype.EVENTTYPE_ACTIVITY,
+ outcome=outcome,
+ action=action,
+ initiator=initiator,
+ target=target,
+ observer=cadfresource.Resource(typeURI=service_security))
+
+ for key, value in kwargs.items():
+ setattr(event, key, value)
+
+ audit = {
+ 'payload': event.as_dict(),
+ 'event_type': event_type,
+ 'send_notification_called': True}
+ self._audits.append(audit)
+
+ self.useFixture(mockpatch.PatchObject(
+ notifications, '_send_audit_notification', fake_audit))
+
+ def _assert_last_note(self, resource_id, operation, resource_type):
+ # NOTE(stevemar): If 'basic' format is not used, then simply
+ # return since this assertion is not valid.
+ if CONF.notification_format != 'basic':
+ return
+ self.assertTrue(len(self._notifications) > 0)
+ note = self._notifications[-1]
+ self.assertEqual(note['operation'], operation)
+ self.assertEqual(note['resource_id'], resource_id)
+ self.assertEqual(note['resource_type'], resource_type)
+ self.assertTrue(note['send_notification_called'])
+
+ def _assert_last_audit(self, resource_id, operation, resource_type,
+ target_uri):
+ # NOTE(stevemar): If 'cadf' format is not used, then simply
+ # return since this assertion is not valid.
+ if CONF.notification_format != 'cadf':
+ return
+ self.assertTrue(len(self._audits) > 0)
+ audit = self._audits[-1]
+ payload = audit['payload']
+ self.assertEqual(resource_id, payload['resource_info'])
+ action = '%s.%s' % (operation, resource_type)
+ self.assertEqual(action, payload['action'])
+ self.assertEqual(target_uri, payload['target']['typeURI'])
+ self.assertEqual(resource_id, payload['target']['id'])
+ event_type = '%s.%s.%s' % ('identity', resource_type, operation)
+ self.assertEqual(event_type, audit['event_type'])
+ self.assertTrue(audit['send_notification_called'])
+
+ def _assert_notify_not_sent(self, resource_id, operation, resource_type,
+ public=True):
+ unexpected = {
+ 'resource_id': resource_id,
+ 'operation': operation,
+ 'resource_type': resource_type,
+ 'send_notification_called': True,
+ 'public': public}
+ for note in self._notifications:
+ self.assertNotEqual(unexpected, note)
+
+ def _assert_notify_sent(self, resource_id, operation, resource_type,
+ public=True):
+ expected = {
+ 'resource_id': resource_id,
+ 'operation': operation,
+ 'resource_type': resource_type,
+ 'send_notification_called': True,
+ 'public': public}
+ for note in self._notifications:
+ if expected == note:
+ break
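+        # (for/else: the else clause runs only when no note matched and broke
+        # out of the loop, i.e. the notification was never sent.)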
+ else:
+ self.fail("Notification not sent.")
+
+
+class NotificationsForEntities(BaseNotificationTest):
+
+ def test_create_group(self):
+ group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = self.identity_api.create_group(group_ref)
+ self._assert_last_note(group_ref['id'], CREATED_OPERATION, 'group')
+ self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group',
+ cadftaxonomy.SECURITY_GROUP)
+
+ def test_create_project(self):
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ self._assert_last_note(
+ project_ref['id'], CREATED_OPERATION, 'project')
+ self._assert_last_audit(project_ref['id'], CREATED_OPERATION,
+ 'project', cadftaxonomy.SECURITY_PROJECT)
+
+ def test_create_role(self):
+ role_ref = self.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
+ self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role',
+ cadftaxonomy.SECURITY_ROLE)
+
+ def test_create_user(self):
+ user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+ self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user')
+ self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user',
+ cadftaxonomy.SECURITY_ACCOUNT_USER)
+
+ def test_create_trust(self):
+ trustor = self.new_user_ref(domain_id=self.domain_id)
+ trustor = self.identity_api.create_user(trustor)
+ trustee = self.new_user_ref(domain_id=self.domain_id)
+ trustee = self.identity_api.create_user(trustee)
+ role_ref = self.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ trust_ref = self.new_trust_ref(trustor['id'],
+ trustee['id'])
+ self.trust_api.create_trust(trust_ref['id'],
+ trust_ref,
+ [role_ref])
+ self._assert_last_note(
+ trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust')
+ self._assert_last_audit(trust_ref['id'], CREATED_OPERATION,
+ 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
+
+ def test_delete_group(self):
+ group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = self.identity_api.create_group(group_ref)
+ self.identity_api.delete_group(group_ref['id'])
+ self._assert_last_note(group_ref['id'], DELETED_OPERATION, 'group')
+ self._assert_last_audit(group_ref['id'], DELETED_OPERATION, 'group',
+ cadftaxonomy.SECURITY_GROUP)
+
+ def test_delete_project(self):
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.assignment_api.delete_project(project_ref['id'])
+ self._assert_last_note(
+ project_ref['id'], DELETED_OPERATION, 'project')
+ self._assert_last_audit(project_ref['id'], DELETED_OPERATION,
+ 'project', cadftaxonomy.SECURITY_PROJECT)
+
+ def test_delete_role(self):
+ role_ref = self.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ self.role_api.delete_role(role_ref['id'])
+ self._assert_last_note(role_ref['id'], DELETED_OPERATION, 'role')
+ self._assert_last_audit(role_ref['id'], DELETED_OPERATION, 'role',
+ cadftaxonomy.SECURITY_ROLE)
+
+ def test_delete_user(self):
+ user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+ self.identity_api.delete_user(user_ref['id'])
+ self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user')
+ self._assert_last_audit(user_ref['id'], DELETED_OPERATION, 'user',
+ cadftaxonomy.SECURITY_ACCOUNT_USER)
+
+ def test_create_domain(self):
+ domain_ref = self.new_domain_ref()
+ self.resource_api.create_domain(domain_ref['id'], domain_ref)
+ self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain')
+ self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain',
+ cadftaxonomy.SECURITY_DOMAIN)
+
+ def test_update_domain(self):
+ domain_ref = self.new_domain_ref()
+ self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ domain_ref['description'] = uuid.uuid4().hex
+ self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain')
+ self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain',
+ cadftaxonomy.SECURITY_DOMAIN)
+
+ def test_delete_domain(self):
+ domain_ref = self.new_domain_ref()
+ self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ domain_ref['enabled'] = False
+ self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self.assignment_api.delete_domain(domain_ref['id'])
+ self._assert_last_note(domain_ref['id'], DELETED_OPERATION, 'domain')
+ self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain',
+ cadftaxonomy.SECURITY_DOMAIN)
+
+ def test_delete_trust(self):
+ trustor = self.new_user_ref(domain_id=self.domain_id)
+ trustor = self.identity_api.create_user(trustor)
+ trustee = self.new_user_ref(domain_id=self.domain_id)
+ trustee = self.identity_api.create_user(trustee)
+ role_ref = self.new_role_ref()
+ trust_ref = self.new_trust_ref(trustor['id'], trustee['id'])
+ self.trust_api.create_trust(trust_ref['id'],
+ trust_ref,
+ [role_ref])
+ self.trust_api.delete_trust(trust_ref['id'])
+ self._assert_last_note(
+ trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust')
+ self._assert_last_audit(trust_ref['id'], DELETED_OPERATION,
+ 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
+
+ def test_create_endpoint(self):
+ endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
+ self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION,
+ 'endpoint')
+ self._assert_last_audit(endpoint_ref['id'], CREATED_OPERATION,
+ 'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
+
+ def test_update_endpoint(self):
+ endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
+ self.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref)
+ self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION,
+ 'endpoint')
+ self._assert_last_audit(endpoint_ref['id'], UPDATED_OPERATION,
+ 'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
+
+ def test_delete_endpoint(self):
+ endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
+ self.catalog_api.delete_endpoint(endpoint_ref['id'])
+ self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION,
+ 'endpoint')
+ self._assert_last_audit(endpoint_ref['id'], DELETED_OPERATION,
+ 'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
+
+ def test_create_service(self):
+ service_ref = self.new_service_ref()
+ self.catalog_api.create_service(service_ref['id'], service_ref)
+ self._assert_notify_sent(service_ref['id'], CREATED_OPERATION,
+ 'service')
+ self._assert_last_audit(service_ref['id'], CREATED_OPERATION,
+ 'service', cadftaxonomy.SECURITY_SERVICE)
+
+ def test_update_service(self):
+ service_ref = self.new_service_ref()
+ self.catalog_api.create_service(service_ref['id'], service_ref)
+ self.catalog_api.update_service(service_ref['id'], service_ref)
+ self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION,
+ 'service')
+ self._assert_last_audit(service_ref['id'], UPDATED_OPERATION,
+ 'service', cadftaxonomy.SECURITY_SERVICE)
+
+ def test_delete_service(self):
+ service_ref = self.new_service_ref()
+ self.catalog_api.create_service(service_ref['id'], service_ref)
+ self.catalog_api.delete_service(service_ref['id'])
+ self._assert_notify_sent(service_ref['id'], DELETED_OPERATION,
+ 'service')
+ self._assert_last_audit(service_ref['id'], DELETED_OPERATION,
+ 'service', cadftaxonomy.SECURITY_SERVICE)
+
+ def test_create_region(self):
+ region_ref = self.new_region_ref()
+ self.catalog_api.create_region(region_ref)
+ self._assert_notify_sent(region_ref['id'], CREATED_OPERATION,
+ 'region')
+ self._assert_last_audit(region_ref['id'], CREATED_OPERATION,
+ 'region', cadftaxonomy.SECURITY_REGION)
+
+ def test_update_region(self):
+ region_ref = self.new_region_ref()
+ self.catalog_api.create_region(region_ref)
+ self.catalog_api.update_region(region_ref['id'], region_ref)
+ self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION,
+ 'region')
+ self._assert_last_audit(region_ref['id'], UPDATED_OPERATION,
+ 'region', cadftaxonomy.SECURITY_REGION)
+
+ def test_delete_region(self):
+ region_ref = self.new_region_ref()
+ self.catalog_api.create_region(region_ref)
+ self.catalog_api.delete_region(region_ref['id'])
+ self._assert_notify_sent(region_ref['id'], DELETED_OPERATION,
+ 'region')
+ self._assert_last_audit(region_ref['id'], DELETED_OPERATION,
+ 'region', cadftaxonomy.SECURITY_REGION)
+
+ def test_create_policy(self):
+ policy_ref = self.new_policy_ref()
+ self.policy_api.create_policy(policy_ref['id'], policy_ref)
+ self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION,
+ 'policy')
+ self._assert_last_audit(policy_ref['id'], CREATED_OPERATION,
+ 'policy', cadftaxonomy.SECURITY_POLICY)
+
+ def test_update_policy(self):
+ policy_ref = self.new_policy_ref()
+ self.policy_api.create_policy(policy_ref['id'], policy_ref)
+ self.policy_api.update_policy(policy_ref['id'], policy_ref)
+ self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION,
+ 'policy')
+ self._assert_last_audit(policy_ref['id'], UPDATED_OPERATION,
+ 'policy', cadftaxonomy.SECURITY_POLICY)
+
+ def test_delete_policy(self):
+ policy_ref = self.new_policy_ref()
+ self.policy_api.create_policy(policy_ref['id'], policy_ref)
+ self.policy_api.delete_policy(policy_ref['id'])
+ self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION,
+ 'policy')
+ self._assert_last_audit(policy_ref['id'], DELETED_OPERATION,
+ 'policy', cadftaxonomy.SECURITY_POLICY)
+
+ def test_disable_domain(self):
+ domain_ref = self.new_domain_ref()
+ self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ domain_ref['enabled'] = False
+ self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain',
+ public=False)
+
+ def test_disable_of_disabled_domain_does_not_notify(self):
+ domain_ref = self.new_domain_ref()
+ domain_ref['enabled'] = False
+ self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ # The domain_ref above is not changed during the create process. We
+ # can use the same ref to perform the update.
+ self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain',
+ public=False)
+
+ def test_update_group(self):
+ group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = self.identity_api.create_group(group_ref)
+ self.identity_api.update_group(group_ref['id'], group_ref)
+ self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group')
+ self._assert_last_audit(group_ref['id'], UPDATED_OPERATION, 'group',
+ cadftaxonomy.SECURITY_GROUP)
+
+ def test_update_project(self):
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.assignment_api.update_project(project_ref['id'], project_ref)
+ self._assert_notify_sent(
+ project_ref['id'], UPDATED_OPERATION, 'project', public=True)
+ self._assert_last_audit(project_ref['id'], UPDATED_OPERATION,
+ 'project', cadftaxonomy.SECURITY_PROJECT)
+
+ def test_disable_project(self):
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ project_ref['enabled'] = False
+ self.assignment_api.update_project(project_ref['id'], project_ref)
+ self._assert_notify_sent(project_ref['id'], 'disabled', 'project',
+ public=False)
+
+ def test_disable_of_disabled_project_does_not_notify(self):
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref['enabled'] = False
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ # The project_ref above is not changed during the create process. We
+ # can use the same ref to perform the update.
+ self.assignment_api.update_project(project_ref['id'], project_ref)
+ self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project',
+ public=False)
+
+ def test_update_project_does_not_send_disable(self):
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ project_ref['enabled'] = True
+ self.assignment_api.update_project(project_ref['id'], project_ref)
+ self._assert_last_note(
+ project_ref['id'], UPDATED_OPERATION, 'project')
+ self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project')
+
+ def test_update_role(self):
+ role_ref = self.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ self.role_api.update_role(role_ref['id'], role_ref)
+ self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role')
+ self._assert_last_audit(role_ref['id'], UPDATED_OPERATION, 'role',
+ cadftaxonomy.SECURITY_ROLE)
+
+ def test_update_user(self):
+ user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+ self.identity_api.update_user(user_ref['id'], user_ref)
+ self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user')
+ self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user',
+ cadftaxonomy.SECURITY_ACCOUNT_USER)
+
+ def test_config_option_no_events(self):
+ self.config_fixture.config(notification_format='basic')
+ role_ref = self.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ # The regular notifications will still be emitted, since they are
+ # used for callback handling.
+ self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
+ # No audit event should have occurred
+ self.assertEqual(0, len(self._audits))
+
+
+class CADFNotificationsForEntities(NotificationsForEntities):
+
+ def setUp(self):
+ super(CADFNotificationsForEntities, self).setUp()
+ self.config_fixture.config(notification_format='cadf')
+
+ def test_initiator_data_is_set(self):
+ ref = self.new_domain_ref()
+ resp = self.post('/domains', body={'domain': ref})
+ resource_id = resp.result.get('domain').get('id')
+ self._assert_last_audit(resource_id, CREATED_OPERATION, 'domain',
+ cadftaxonomy.SECURITY_DOMAIN)
+ self.assertTrue(len(self._audits) > 0)
+ audit = self._audits[-1]
+ payload = audit['payload']
+ self.assertEqual(self.user_id, payload['initiator']['id'])
+ self.assertEqual(self.project_id, payload['initiator']['project_id'])
+
+
+class TestEventCallbacks(test_v3.RestfulTestCase):
+
+ def setUp(self):
+ super(TestEventCallbacks, self).setUp()
+ self.has_been_called = False
+
+ def _project_deleted_callback(self, service, resource_type, operation,
+ payload):
+ self.has_been_called = True
+
+ def _project_created_callback(self, service, resource_type, operation,
+ payload):
+ self.has_been_called = True
+
+ def test_notification_received(self):
+ callback = register_callback(CREATED_OPERATION, 'project')
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.assertTrue(callback.called)
+
+ def test_notification_method_not_callable(self):
+ fake_method = None
+ self.assertRaises(TypeError,
+ notifications.register_event_callback,
+ UPDATED_OPERATION,
+ 'project',
+ [fake_method])
+
+ def test_notification_event_not_valid(self):
+ self.assertRaises(ValueError,
+ notifications.register_event_callback,
+ uuid.uuid4().hex,
+ 'project',
+ self._project_deleted_callback)
+
+ def test_event_registration_for_unknown_resource_type(self):
+ # Registration for unknown resource types should succeed. If no event
+        # is issued for that resource type, the callback won't be triggered.
+ notifications.register_event_callback(DELETED_OPERATION,
+ uuid.uuid4().hex,
+ self._project_deleted_callback)
+ resource_type = uuid.uuid4().hex
+ notifications.register_event_callback(DELETED_OPERATION,
+ resource_type,
+ self._project_deleted_callback)
+
+ def test_provider_event_callbacks_subscription(self):
+ callback_called = []
+
+ @dependency.provider('foo_api')
+ class Foo(object):
+ def __init__(self):
+ self.event_callbacks = {
+ CREATED_OPERATION: {'project': [self.foo_callback]}}
+
+ def foo_callback(self, service, resource_type, operation,
+ payload):
+ # uses callback_called from the closure
+ callback_called.append(True)
+
+ Foo()
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.assertEqual([True], callback_called)
+
+ def test_invalid_event_callbacks(self):
+ @dependency.provider('foo_api')
+ class Foo(object):
+ def __init__(self):
+ self.event_callbacks = 'bogus'
+
+ self.assertRaises(ValueError, Foo)
+
+ def test_invalid_event_callbacks_event(self):
+ @dependency.provider('foo_api')
+ class Foo(object):
+ def __init__(self):
+ self.event_callbacks = {CREATED_OPERATION: 'bogus'}
+
+ self.assertRaises(ValueError, Foo)
+
+
+class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
+
+ LOCAL_HOST = 'localhost'
+ ACTION = 'authenticate'
+ ROLE_ASSIGNMENT = 'role_assignment'
+
+ def setUp(self):
+ super(CadfNotificationsWrapperTestCase, self).setUp()
+ self._notifications = []
+
+ def fake_notify(action, initiator, outcome, target,
+ event_type, **kwargs):
+ service_security = cadftaxonomy.SERVICE_SECURITY
+
+ event = eventfactory.EventFactory().new_event(
+ eventType=cadftype.EVENTTYPE_ACTIVITY,
+ outcome=outcome,
+ action=action,
+ initiator=initiator,
+ target=target,
+ observer=cadfresource.Resource(typeURI=service_security))
+
+ for key, value in kwargs.items():
+ setattr(event, key, value)
+
+ note = {
+ 'action': action,
+ 'initiator': initiator,
+ 'event': event,
+ 'send_notification_called': True}
+ self._notifications.append(note)
+
+ self.useFixture(mockpatch.PatchObject(
+ notifications, '_send_audit_notification', fake_notify))
+
+ def _assert_last_note(self, action, user_id):
+ self.assertTrue(self._notifications)
+ note = self._notifications[-1]
+ self.assertEqual(note['action'], action)
+ initiator = note['initiator']
+ self.assertEqual(initiator.id, user_id)
+ self.assertEqual(initiator.host.address, self.LOCAL_HOST)
+ self.assertTrue(note['send_notification_called'])
+
+ def _assert_event(self, role_id, project=None, domain=None,
+ user=None, group=None, inherit=False):
+ """Assert that the CADF event is valid.
+
+ In the case of role assignments, the event will have extra data,
+ specifically, the role, target, actor, and if the role is inherited.
+
+ An example event, as a dictionary is seen below:
+ {
+ 'typeURI': 'http://schemas.dmtf.org/cloud/audit/1.0/event',
+ 'initiator': {
+ 'typeURI': 'service/security/account/user',
+ 'host': {'address': 'localhost'},
+ 'id': 'openstack:0a90d95d-582c-4efb-9cbc-e2ca7ca9c341',
+ 'name': u'bccc2d9bfc2a46fd9e33bcf82f0b5c21'
+ },
+ 'target': {
+ 'typeURI': 'service/security/account/user',
+ 'id': 'openstack:d48ea485-ef70-4f65-8d2b-01aa9d7ec12d'
+ },
+ 'observer': {
+ 'typeURI': 'service/security',
+ 'id': 'openstack:d51dd870-d929-4aba-8d75-dcd7555a0c95'
+ },
+ 'eventType': 'activity',
+ 'eventTime': '2014-08-21T21:04:56.204536+0000',
+ 'role': u'0e6b990380154a2599ce6b6e91548a68',
+ 'domain': u'24bdcff1aab8474895dbaac509793de1',
+ 'inherited_to_projects': False,
+ 'group': u'c1e22dc67cbd469ea0e33bf428fe597a',
+ 'action': 'created.role_assignment',
+ 'outcome': 'success',
+ 'id': 'openstack:782689dd-f428-4f13-99c7-5c70f94a5ac1'
+ }
+ """
+
+ note = self._notifications[-1]
+ event = note['event']
+ if project:
+ self.assertEqual(project, event.project)
+ if domain:
+ self.assertEqual(domain, event.domain)
+ if user:
+ self.assertEqual(user, event.user)
+ if group:
+ self.assertEqual(group, event.group)
+ self.assertEqual(role_id, event.role)
+ self.assertEqual(inherit, event.inherited_to_projects)
+
+ def test_v3_authenticate_user_name_and_domain_id(self):
+ user_id = self.user_id
+ user_name = self.user['name']
+ password = self.user['password']
+ domain_id = self.domain_id
+ data = self.build_authentication_request(username=user_name,
+ user_domain_id=domain_id,
+ password=password)
+ self.post('/auth/tokens', body=data)
+ self._assert_last_note(self.ACTION, user_id)
+
+ def test_v3_authenticate_user_id(self):
+ user_id = self.user_id
+ password = self.user['password']
+ data = self.build_authentication_request(user_id=user_id,
+ password=password)
+ self.post('/auth/tokens', body=data)
+ self._assert_last_note(self.ACTION, user_id)
+
+ def test_v3_authenticate_user_name_and_domain_name(self):
+ user_id = self.user_id
+ user_name = self.user['name']
+ password = self.user['password']
+ domain_name = self.domain['name']
+ data = self.build_authentication_request(username=user_name,
+ user_domain_name=domain_name,
+ password=password)
+ self.post('/auth/tokens', body=data)
+ self._assert_last_note(self.ACTION, user_id)
+
+ def _test_role_assignment(self, url, role, project=None, domain=None,
+ user=None, group=None):
+ self.put(url)
+ action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT)
+ self._assert_last_note(action, self.user_id)
+ self._assert_event(role, project, domain, user, group)
+ self.delete(url)
+ action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT)
+ self._assert_last_note(action, self.user_id)
+ self._assert_event(role, project, domain, user, group)
+
+ def test_user_project_grant(self):
+ url = ('/projects/%s/users/%s/roles/%s' %
+ (self.project_id, self.user_id, self.role_id))
+ self._test_role_assignment(url, self.role_id,
+ project=self.project_id,
+ user=self.user_id)
+
+ def test_group_domain_grant(self):
+ group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group = self.identity_api.create_group(group_ref)
+ url = ('/domains/%s/groups/%s/roles/%s' %
+ (self.domain_id, group['id'], self.role_id))
+ self._test_role_assignment(url, self.role_id,
+ domain=self.domain_id,
+ group=group['id'])
+
+
+class TestCallbackRegistration(testtools.TestCase):
+ def setUp(self):
+ super(TestCallbackRegistration, self).setUp()
+ self.mock_log = mock.Mock()
+ # Force the callback logging to occur
+ self.mock_log.logger.getEffectiveLevel.return_value = logging.DEBUG
+
+ def verify_log_message(self, data):
+ """Tests that use this are a little brittle because adding more
+ logging can break them.
+
+ TODO(dstanek): remove the need for this in a future refactoring
+
+ """
+ log_fn = self.mock_log.debug
+ self.assertEqual(len(data), log_fn.call_count)
+ for datum in data:
+ log_fn.assert_any_call(mock.ANY, datum)
+
+ def test_a_function_callback(self):
+ def callback(*args, **kwargs):
+ pass
+
+ resource_type = 'thing'
+ with mock.patch('keystone.notifications.LOG', self.mock_log):
+ notifications.register_event_callback(
+ CREATED_OPERATION, resource_type, callback)
+
+ callback = 'keystone.tests.unit.common.test_notifications.callback'
+ expected_log_data = {
+ 'callback': callback,
+ 'event': 'identity.%s.created' % resource_type
+ }
+ self.verify_log_message([expected_log_data])
+
+ def test_a_method_callback(self):
+ class C(object):
+ def callback(self, *args, **kwargs):
+ pass
+
+ with mock.patch('keystone.notifications.LOG', self.mock_log):
+ notifications.register_event_callback(
+ CREATED_OPERATION, 'thing', C.callback)
+
+ callback = 'keystone.tests.unit.common.test_notifications.C.callback'
+ expected_log_data = {
+ 'callback': callback,
+ 'event': 'identity.thing.created'
+ }
+ self.verify_log_message([expected_log_data])
+
+ def test_a_list_of_callbacks(self):
+ def callback(*args, **kwargs):
+ pass
+
+ class C(object):
+ def callback(self, *args, **kwargs):
+ pass
+
+ with mock.patch('keystone.notifications.LOG', self.mock_log):
+ notifications.register_event_callback(
+ CREATED_OPERATION, 'thing', [callback, C.callback])
+
+ callback_1 = 'keystone.tests.unit.common.test_notifications.callback'
+ callback_2 = 'keystone.tests.unit.common.test_notifications.C.callback'
+ expected_log_data = [
+ {
+ 'callback': callback_1,
+ 'event': 'identity.thing.created'
+ },
+ {
+ 'callback': callback_2,
+ 'event': 'identity.thing.created'
+ },
+ ]
+ self.verify_log_message(expected_log_data)
+
+ def test_an_invalid_callback(self):
+        self.assertRaises(TypeError,
+                          notifications.register_event_callback,
+                          CREATED_OPERATION, 'thing', object())
+
+ def test_an_invalid_event(self):
+ def callback(*args, **kwargs):
+ pass
+
+ self.assertRaises(ValueError,
+ notifications.register_event_callback,
+ uuid.uuid4().hex,
+ 'thing',
+ callback)
diff --git a/keystone-moon/keystone/tests/unit/common/test_pemutils.py b/keystone-moon/keystone/tests/unit/common/test_pemutils.py
new file mode 100644
index 00000000..c2f58518
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_pemutils.py
@@ -0,0 +1,337 @@
+# Copyright 2013 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import base64
+
+from six import moves
+
+from keystone.common import pemutils
+from keystone.tests import unit as tests
+
+
+# List of 2-tuples, (pem_type, pem_header)
+headers = pemutils.PEM_TYPE_TO_HEADER.items()
+
+
+def make_data(size, offset=0):
+ return ''.join([chr(x % 255) for x in moves.range(offset, size + offset)])
+
+
+def make_base64_from_data(data):
+ return base64.b64encode(data)
+
+
+def wrap_base64(base64_text):
+ wrapped_text = '\n'.join([base64_text[x:x + 64]
+ for x in moves.range(0, len(base64_text), 64)])
+ wrapped_text += '\n'
+ return wrapped_text
+
+
+def make_pem(header, data):
+ base64_text = make_base64_from_data(data)
+ wrapped_text = wrap_base64(base64_text)
+
+ result = '-----BEGIN %s-----\n' % header
+ result += wrapped_text
+ result += '-----END %s-----\n' % header
+
+ return result
+
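+# Illustrative only (a sketch, not part of the test suite): with the
+# hypothetical input 'abc', make_pem('CERTIFICATE', 'abc') produces:
+#
+#     -----BEGIN CERTIFICATE-----
+#     YWJj
+#     -----END CERTIFICATE-----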
+
+class PEM(object):
+ """PEM text and it's associated data broken out, used for testing.
+
+ """
+ def __init__(self, pem_header='CERTIFICATE', pem_type='cert',
+ data_size=70, data_offset=0):
+ self.pem_header = pem_header
+ self.pem_type = pem_type
+ self.data_size = data_size
+ self.data_offset = data_offset
+ self.data = make_data(self.data_size, self.data_offset)
+ self.base64_text = make_base64_from_data(self.data)
+ self.wrapped_base64 = wrap_base64(self.base64_text)
+ self.pem_text = make_pem(self.pem_header, self.data)
+
+
+class TestPEMParseResult(tests.BaseTestCase):
+
+ def test_pem_types(self):
+ for pem_type in pemutils.pem_types:
+ pem_header = pemutils.PEM_TYPE_TO_HEADER[pem_type]
+ r = pemutils.PEMParseResult(pem_type=pem_type)
+ self.assertEqual(pem_type, r.pem_type)
+ self.assertEqual(pem_header, r.pem_header)
+
+ pem_type = 'xxx'
+ self.assertRaises(ValueError,
+ pemutils.PEMParseResult, pem_type=pem_type)
+
+ def test_pem_headers(self):
+ for pem_header in pemutils.pem_headers:
+ pem_type = pemutils.PEM_HEADER_TO_TYPE[pem_header]
+ r = pemutils.PEMParseResult(pem_header=pem_header)
+ self.assertEqual(pem_type, r.pem_type)
+ self.assertEqual(pem_header, r.pem_header)
+
+ pem_header = 'xxx'
+ self.assertRaises(ValueError,
+ pemutils.PEMParseResult, pem_header=pem_header)
+
+
+class TestPEMParse(tests.BaseTestCase):
+ def test_parse_none(self):
+ text = ''
+ text += 'bla bla\n'
+ text += 'yada yada yada\n'
+ text += 'burfl blatz bingo\n'
+
+ parse_results = pemutils.parse_pem(text)
+ self.assertEqual(0, len(parse_results))
+
+        self.assertFalse(pemutils.is_pem(text))
+
+ def test_parse_invalid(self):
+ p = PEM(pem_type='xxx',
+ pem_header='XXX')
+ text = p.pem_text
+
+ self.assertRaises(ValueError,
+ pemutils.parse_pem, text)
+
+ def test_parse_one(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ p = pems[i]
+ text = p.pem_text
+
+ parse_results = pemutils.parse_pem(text)
+ self.assertEqual(1, len(parse_results))
+
+ r = parse_results[0]
+ self.assertEqual(p.pem_type, r.pem_type)
+ self.assertEqual(p.pem_header, r.pem_header)
+ self.assertEqual(p.pem_text,
+ text[r.pem_start:r.pem_end])
+ self.assertEqual(p.wrapped_base64,
+ text[r.base64_start:r.base64_end])
+ self.assertEqual(p.data, r.binary_data)
+
+ def test_parse_one_embedded(self):
+ p = PEM(data_offset=0)
+ text = ''
+ text += 'bla bla\n'
+ text += 'yada yada yada\n'
+ text += p.pem_text
+ text += 'burfl blatz bingo\n'
+
+ parse_results = pemutils.parse_pem(text)
+ self.assertEqual(1, len(parse_results))
+
+ r = parse_results[0]
+ self.assertEqual(p.pem_type, r.pem_type)
+ self.assertEqual(p.pem_header, r.pem_header)
+ self.assertEqual(p.pem_text,
+ text[r.pem_start:r.pem_end])
+ self.assertEqual(p.wrapped_base64,
+                         text[r.base64_start:r.base64_end])
+ self.assertEqual(p.data, r.binary_data)
+
+    def test_parse_multiple(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+ text = ''
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ text += pems[i].pem_text
+
+ parse_results = pemutils.parse_pem(text)
+ self.assertEqual(count, len(parse_results))
+
+ for i in moves.range(count):
+ r = parse_results[i]
+ p = pems[i]
+
+ self.assertEqual(p.pem_type, r.pem_type)
+ self.assertEqual(p.pem_header, r.pem_header)
+ self.assertEqual(p.pem_text,
+ text[r.pem_start:r.pem_end])
+ self.assertEqual(p.wrapped_base64,
+                             text[r.base64_start:r.base64_end])
+ self.assertEqual(p.data, r.binary_data)
+
+    def test_parse_multiple_find_specific(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+ text = ''
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ text += pems[i].pem_text
+
+ for i in moves.range(count):
+ parse_results = pemutils.parse_pem(text, pem_type=headers[i][0])
+ self.assertEqual(1, len(parse_results))
+
+ r = parse_results[0]
+ p = pems[i]
+
+ self.assertEqual(p.pem_type, r.pem_type)
+ self.assertEqual(p.pem_header, r.pem_header)
+ self.assertEqual(p.pem_text,
+ text[r.pem_start:r.pem_end])
+ self.assertEqual(p.wrapped_base64,
+ text[r.base64_start:r.base64_end])
+ self.assertEqual(p.data, r.binary_data)
+
+    def test_parse_multiple_embedded(self):
+ data_size = 75
+ count = len(headers)
+ pems = []
+ text = ''
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ text += 'bla bla\n'
+ text += 'yada yada yada\n'
+ text += pems[i].pem_text
+ text += 'burfl blatz bingo\n'
+
+ parse_results = pemutils.parse_pem(text)
+ self.assertEqual(count, len(parse_results))
+
+ for i in moves.range(count):
+ r = parse_results[i]
+ p = pems[i]
+
+ self.assertEqual(p.pem_type, r.pem_type)
+ self.assertEqual(p.pem_header, r.pem_header)
+ self.assertEqual(p.pem_text,
+ text[r.pem_start:r.pem_end])
+ self.assertEqual(p.wrapped_base64,
+ text[r.base64_start:r.base64_end])
+ self.assertEqual(p.data, r.binary_data)
+
+ def test_get_pem_data_none(self):
+ text = ''
+ text += 'bla bla\n'
+ text += 'yada yada yada\n'
+ text += 'burfl blatz bingo\n'
+
+ data = pemutils.get_pem_data(text)
+ self.assertIsNone(data)
+
+ def test_get_pem_data_invalid(self):
+ p = PEM(pem_type='xxx',
+ pem_header='XXX')
+ text = p.pem_text
+
+ self.assertRaises(ValueError,
+ pemutils.get_pem_data, text)
+
+ def test_get_pem_data(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ p = pems[i]
+ text = p.pem_text
+
+ data = pemutils.get_pem_data(text, p.pem_type)
+ self.assertEqual(p.data, data)
+
+ def test_is_pem(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ p = pems[i]
+ text = p.pem_text
+ self.assertTrue(pemutils.is_pem(text, pem_type=p.pem_type))
+ self.assertFalse(pemutils.is_pem(text,
+ pem_type=p.pem_type + 'xxx'))
+
+ def test_base64_to_pem(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ p = pems[i]
+ pem = pemutils.base64_to_pem(p.base64_text, p.pem_type)
+ self.assertEqual(pemutils.get_pem_data(pem, p.pem_type), p.data)
+
+ def test_binary_to_pem(self):
+ data_size = 70
+ count = len(headers)
+ pems = []
+
+ for i in moves.range(count):
+ pems.append(PEM(pem_type=headers[i][0],
+ pem_header=headers[i][1],
+ data_size=data_size + i,
+ data_offset=i))
+
+ for i in moves.range(count):
+ p = pems[i]
+ pem = pemutils.binary_to_pem(p.data, p.pem_type)
+ self.assertEqual(pemutils.get_pem_data(pem, p.pem_type), p.data)
diff --git a/keystone-moon/keystone/tests/unit/common/test_sql_core.py b/keystone-moon/keystone/tests/unit/common/test_sql_core.py
new file mode 100644
index 00000000..1f33cfc3
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_sql_core.py
@@ -0,0 +1,52 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from sqlalchemy.ext import declarative
+
+from keystone.common import sql
+from keystone.tests import unit as tests
+from keystone.tests.unit import utils
+
+
+ModelBase = declarative.declarative_base()
+
+
+class TestModel(ModelBase, sql.ModelDictMixin):
+ __tablename__ = 'testmodel'
+ id = sql.Column(sql.String(64), primary_key=True)
+ text = sql.Column(sql.String(64), nullable=False)
+
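+# Illustrative round trip of the ModelDictMixin contract exercised by the
+# tests below (a sketch with made-up values, not an additional test):
+#     m = TestModel.from_dict({'id': 'abc', 'text': 'xyz'})
+#     assert m.to_dict() == {'id': 'abc', 'text': 'xyz'}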
+
+class TestModelDictMixin(tests.BaseTestCase):
+
+ def test_creating_a_model_instance_from_a_dict(self):
+ d = {'id': utils.new_uuid(), 'text': utils.new_uuid()}
+ m = TestModel.from_dict(d)
+ self.assertEqual(m.id, d['id'])
+ self.assertEqual(m.text, d['text'])
+
+ def test_creating_a_dict_from_a_model_instance(self):
+ m = TestModel(id=utils.new_uuid(), text=utils.new_uuid())
+ d = m.to_dict()
+ self.assertEqual(m.id, d['id'])
+ self.assertEqual(m.text, d['text'])
+
+ def test_creating_a_model_instance_from_an_invalid_dict(self):
+ d = {'id': utils.new_uuid(), 'text': utils.new_uuid(), 'extra': None}
+ self.assertRaises(TypeError, TestModel.from_dict, d)
+
+ def test_creating_a_dict_from_a_model_instance_that_has_extra_attrs(self):
+ expected = {'id': utils.new_uuid(), 'text': utils.new_uuid()}
+ m = TestModel(id=expected['id'], text=expected['text'])
+ m.extra = 'this should not be in the dictionary'
+ self.assertEqual(m.to_dict(), expected)
diff --git a/keystone-moon/keystone/tests/unit/common/test_utils.py b/keystone-moon/keystone/tests/unit/common/test_utils.py
new file mode 100644
index 00000000..184c8141
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_utils.py
@@ -0,0 +1,164 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslo_serialization import jsonutils
+
+from keystone.common import utils as common_utils
+from keystone import exception
+from keystone import service
+from keystone.tests import unit as tests
+from keystone.tests.unit import utils
+
+
+CONF = cfg.CONF
+
+TZ = utils.TZ
+
+
+class UtilsTestCase(tests.BaseTestCase):
+ OPTIONAL = object()
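+    # OPTIONAL is a sentinel object: it lets _create_test_user() distinguish
+    # "no password supplied" from an explicit password of None.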
+
+ def setUp(self):
+ super(UtilsTestCase, self).setUp()
+ self.config_fixture = self.useFixture(config_fixture.Config(CONF))
+
+ def test_hash(self):
+ password = 'right'
+ wrong = 'wrongwrong' # Two wrongs don't make a right
+ hashed = common_utils.hash_password(password)
+ self.assertTrue(common_utils.check_password(password, hashed))
+ self.assertFalse(common_utils.check_password(wrong, hashed))
+
+ def test_verify_normal_password_strict(self):
+ self.config_fixture.config(strict_password_check=False)
+ password = uuid.uuid4().hex
+ verified = common_utils.verify_length_and_trunc_password(password)
+ self.assertEqual(password, verified)
+
+ def test_that_a_hash_can_not_be_validated_against_a_hash(self):
+ # NOTE(dstanek): Bug 1279849 reported a problem where passwords
+ # were not being hashed if they already looked like a hash. This
+ # would allow someone to hash their password ahead of time
+ # (potentially getting around password requirements, like
+ # length) and then they could auth with their original password.
+ password = uuid.uuid4().hex
+ hashed_password = common_utils.hash_password(password)
+ new_hashed_password = common_utils.hash_password(hashed_password)
+ self.assertFalse(common_utils.check_password(password,
+ new_hashed_password))
+
+ def test_verify_long_password_strict(self):
+ self.config_fixture.config(strict_password_check=False)
+ self.config_fixture.config(group='identity', max_password_length=5)
+ max_length = CONF.identity.max_password_length
+ invalid_password = 'passw0rd'
+ trunc = common_utils.verify_length_and_trunc_password(invalid_password)
+ self.assertEqual(invalid_password[:max_length], trunc)
+
+ def test_verify_long_password_strict_raises_exception(self):
+ self.config_fixture.config(strict_password_check=True)
+ self.config_fixture.config(group='identity', max_password_length=5)
+ invalid_password = 'passw0rd'
+ self.assertRaises(exception.PasswordVerificationError,
+ common_utils.verify_length_and_trunc_password,
+ invalid_password)
+
+ def test_hash_long_password_truncation(self):
+ self.config_fixture.config(strict_password_check=False)
+ invalid_length_password = '0' * 9999999
+ hashed = common_utils.hash_password(invalid_length_password)
+ self.assertTrue(common_utils.check_password(invalid_length_password,
+ hashed))
+
+ def test_hash_long_password_strict(self):
+ self.config_fixture.config(strict_password_check=True)
+ invalid_length_password = '0' * 9999999
+ self.assertRaises(exception.PasswordVerificationError,
+ common_utils.hash_password,
+ invalid_length_password)
+
+ def _create_test_user(self, password=OPTIONAL):
+ user = {"name": "hthtest"}
+ if password is not self.OPTIONAL:
+ user['password'] = password
+
+ return user
+
+ def test_hash_user_password_without_password(self):
+ user = self._create_test_user()
+ hashed = common_utils.hash_user_password(user)
+ self.assertEqual(user, hashed)
+
+ def test_hash_user_password_with_null_password(self):
+ user = self._create_test_user(password=None)
+ hashed = common_utils.hash_user_password(user)
+ self.assertEqual(user, hashed)
+
+ def test_hash_user_password_with_empty_password(self):
+ password = ''
+ user = self._create_test_user(password=password)
+ user_hashed = common_utils.hash_user_password(user)
+ password_hashed = user_hashed['password']
+ self.assertTrue(common_utils.check_password(password, password_hashed))
+
+ def test_hash_edge_cases(self):
+ hashed = common_utils.hash_password('secret')
+ self.assertFalse(common_utils.check_password('', hashed))
+ self.assertFalse(common_utils.check_password(None, hashed))
+
+ def test_hash_unicode(self):
+ password = u'Comment \xe7a va'
+ wrong = 'Comment ?a va'
+ hashed = common_utils.hash_password(password)
+ self.assertTrue(common_utils.check_password(password, hashed))
+ self.assertFalse(common_utils.check_password(wrong, hashed))
+
+ def test_auth_str_equal(self):
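+        # auth_str_equal is a constant-time string comparison helper, so it
+        # avoids leaking timing information when credentials are checked.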
+ self.assertTrue(common_utils.auth_str_equal('abc123', 'abc123'))
+ self.assertFalse(common_utils.auth_str_equal('a', 'aaaaa'))
+ self.assertFalse(common_utils.auth_str_equal('aaaaa', 'a'))
+ self.assertFalse(common_utils.auth_str_equal('ABC123', 'abc123'))
+
+ def test_unixtime(self):
+ global TZ
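+        # utils.timezone is a test helper from keystone.tests.unit.utils; it
+        # is assumed here to apply the module-level TZ value around the
+        # wrapped call, so each iteration below runs under a different zone.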
+
+ @utils.timezone
+ def _test_unixtime():
+ epoch = common_utils.unixtime(dt)
+ self.assertEqual(epoch, epoch_ans, "TZ=%s" % TZ)
+
+ dt = datetime.datetime(1970, 1, 2, 3, 4, 56, 0)
+ epoch_ans = 56 + 4 * 60 + 3 * 3600 + 86400
+ for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
+ TZ = 'UTC' + d
+ _test_unixtime()
+
+ def test_pki_encoder(self):
+ data = {'field': 'value'}
+ json = jsonutils.dumps(data, cls=common_utils.PKIEncoder)
+ expected_json = b'{"field":"value"}'
+ self.assertEqual(expected_json, json)
+
+
+class ServiceHelperTests(tests.BaseTestCase):
+
+ @service.fail_gracefully
+ def _do_test(self):
+ raise Exception("Test Exc")
+
+ def test_fail_gracefully(self):
+ self.assertRaises(tests.UnexpectedExit, self._do_test)
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf b/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf
new file mode 100644
index 00000000..2bd0c1a6
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf
@@ -0,0 +1,4 @@
+# Used for running the Migrate tests against a live DB2 server.
+# See _sql_livetest.py.
+[database]
+connection = ibm_db_sa://keystone:keystone@/staktest?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf
new file mode 100644
index 00000000..32161185
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf
@@ -0,0 +1,5 @@
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+suffix = cn=example,cn=com
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf
new file mode 100644
index 00000000..36fa1ac9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf
@@ -0,0 +1,41 @@
+[ldap]
+url = fakepool://memory
+user = cn=Admin
+password = password
+backend_entities = ['Tenant', 'User', 'UserRoleAssociation', 'Role', 'Group', 'Domain']
+suffix = cn=example,cn=com
+
+# Connection pooling specific attributes
+
+# Enable LDAP connection pooling. (boolean value)
+use_pool=true
+
+# Connection pool size. (integer value)
+pool_size=5
+
+# Maximum count of reconnect trials. (integer value)
+pool_retry_max=2
+
+# Time span in seconds to wait between two reconnect trials.
+# (floating point value)
+pool_retry_delay=0.2
+
+# Connector timeout in seconds. Value -1 indicates indefinite
+# wait for response. (integer value)
+pool_connection_timeout=-1
+
+# Connection lifetime in seconds.
+# (integer value)
+pool_connection_lifetime=600
+
+# Enable LDAP connection pooling for end user authentication.
+# If use_pool is disabled, then this setting is meaningless
+# and is not used at all. (boolean value)
+use_auth_pool=true
+
+# End user auth connection pool size. (integer value)
+auth_pool_size=50
+
+# End user auth connection lifetime in seconds. (integer
+# value)
+auth_pool_connection_lifetime=60 \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
new file mode 100644
index 00000000..8a06f2f9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
@@ -0,0 +1,14 @@
+[database]
+# For a file-based sqlite database at a specific location use:
+#connection = sqlite:////tmp/keystone.db
+# To test MySQL:
+#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+# To test PostgreSQL:
+#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
+idle_timeout = 200
+
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+suffix = cn=example,cn=com
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
new file mode 100644
index 00000000..59cb8577
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
@@ -0,0 +1,14 @@
+[ldap]
+url = ldap://localhost
+user = cn=Manager,dc=openstack,dc=org
+password = test
+suffix = dc=openstack,dc=org
+group_tree_dn = ou=UserGroups,dc=openstack,dc=org
+role_tree_dn = ou=Roles,dc=openstack,dc=org
+project_tree_dn = ou=Projects,dc=openstack,dc=org
+user_tree_dn = ou=Users,dc=openstack,dc=org
+project_enabled_emulation = True
+user_enabled_emulation = True
+user_mail_attribute = mail
+use_dumb_member = True
+
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
new file mode 100644
index 00000000..2d04d83d
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
@@ -0,0 +1,9 @@
+[database]
+connection = sqlite://
+# For a file-based sqlite database use:
+#connection = sqlite:////tmp/keystone.db
+# To test MySQL:
+#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+# To test PostgreSQL:
+#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
+idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
new file mode 100644
index 00000000..d612f729
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
@@ -0,0 +1,4 @@
+# Used for running the Migrate tests against a live MySQL server.
+# See _sql_livetest.py.
+[database]
+connection = mysql://keystone:keystone@localhost/keystone_test?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
new file mode 100644
index 00000000..a85f5226
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
@@ -0,0 +1,35 @@
+[ldap]
+url = ldap://localhost
+user = cn=Manager,dc=openstack,dc=org
+password = test
+suffix = dc=openstack,dc=org
+group_tree_dn = ou=UserGroups,dc=openstack,dc=org
+role_tree_dn = ou=Roles,dc=openstack,dc=org
+project_tree_dn = ou=Projects,dc=openstack,dc=org
+user_tree_dn = ou=Users,dc=openstack,dc=org
+project_enabled_emulation = True
+user_enabled_emulation = True
+user_mail_attribute = mail
+use_dumb_member = True
+
+# Connection pooling specific attributes
+
+# Enable LDAP connection pooling. (boolean value)
+use_pool=true
+# Connection pool size. (integer value)
+pool_size=5
+# Connection lifetime in seconds.
+# (integer value)
+pool_connection_lifetime=60
+
+# Enable LDAP connection pooling for end user authentication.
+# If use_pool is disabled, then this setting is meaningless
+# and is not used at all. (boolean value)
+use_auth_pool=true
+
+# End user auth connection pool size. (integer value)
+auth_pool_size=50
+
+# End user auth connection lifetime in seconds. (integer
+# value)
+auth_pool_connection_lifetime=300 \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf
new file mode 100644
index 00000000..001805df
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf
@@ -0,0 +1,4 @@
+# Used for running the Migrate tests against a live PostgreSQL server.
+# See _sql_livetest.py.
+[database]
+connection = postgresql://keystone:keystone@localhost/keystone_test?client_encoding=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
new file mode 100644
index 00000000..9d401af3
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
@@ -0,0 +1,8 @@
+[database]
+# For a file-based sqlite database at a specific location use:
+#connection = sqlite:////tmp/keystone.db
+# To test MySQL:
+#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+# To test PostgreSQL:
+#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
+idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
new file mode 100644
index 00000000..d35b9139
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
@@ -0,0 +1,17 @@
+[ldap]
+url = ldap://
+user = dc=Manager,dc=openstack,dc=org
+password = test
+suffix = dc=openstack,dc=org
+group_tree_dn = ou=UserGroups,dc=openstack,dc=org
+role_tree_dn = ou=Roles,dc=openstack,dc=org
+project_tree_dn = ou=Projects,dc=openstack,dc=org
+user_tree_dn = ou=Users,dc=openstack,dc=org
+project_enabled_emulation = True
+user_enabled_emulation = True
+user_mail_attribute = mail
+use_dumb_member = True
+use_tls = True
+tls_cacertfile = /etc/keystone/ssl/certs/cacert.pem
+tls_cacertdir = /etc/keystone/ssl/certs/
+tls_req_cert = demand
diff --git a/keystone-moon/keystone/tests/unit/config_files/deprecated.conf b/keystone-moon/keystone/tests/unit/config_files/deprecated.conf
new file mode 100644
index 00000000..515e663a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/deprecated.conf
@@ -0,0 +1,8 @@
+# Options in this file are deprecated. See test_config.
+
+[sql]
+# These options were deprecated in Icehouse with the switch to oslo's
+# db.sqlalchemy.
+
+connection = sqlite://deprecated
+idle_timeout = 54321
diff --git a/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf b/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf
new file mode 100644
index 00000000..1d1c926f
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf
@@ -0,0 +1,15 @@
+# Options in this file are deprecated. See test_config.
+
+[sql]
+# These options were deprecated in Icehouse with the switch to oslo's
+# db.sqlalchemy.
+
+connection = sqlite://deprecated
+idle_timeout = 54321
+
+
+[database]
+# These are the new options from the [sql] section.
+
+connection = sqlite://new
+idle_timeout = 65432
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
new file mode 100644
index 00000000..a4492a67
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
@@ -0,0 +1,5 @@
+# The domain-specific configuration file for the test domain
+# 'domain1' for use with unit tests.
+
+[identity]
+driver = keystone.identity.backends.sql.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
new file mode 100644
index 00000000..7049afed
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
@@ -0,0 +1,14 @@
+# The domain-specific configuration file for the default domain for
+# use with unit tests.
+#
+# The domain_name of the default domain is 'Default', hence the
+# strange mix of upper/lower case in the file name.
+
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+suffix = cn=example,cn=com
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
new file mode 100644
index 00000000..6b7e2488
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
@@ -0,0 +1,11 @@
+# The domain-specific configuration file for the test domain
+# 'domain1' for use with unit tests.
+
+[ldap]
+url = fake://memory1
+user = cn=Admin
+password = password
+suffix = cn=example,cn=com
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
new file mode 100644
index 00000000..0ed68eb9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
@@ -0,0 +1,13 @@
+# The domain-specific configuration file for the test domain
+# 'domain2' for use with unit tests.
+
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+suffix = cn=myroot,cn=com
+group_tree_dn = ou=UserGroups,dc=myroot,dc=org
+user_tree_dn = ou=Users,dc=myroot,dc=org
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
new file mode 100644
index 00000000..81b44462
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
@@ -0,0 +1,5 @@
+# The domain-specific configuration file for the test domain
+# 'domain2' for use with unit tests.
+
+[identity]
+driver = keystone.identity.backends.sql.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
new file mode 100644
index 00000000..7049afed
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
@@ -0,0 +1,14 @@
+# The domain-specific configuration file for the default domain for
+# use with unit tests.
+#
+# The domain_name of the default domain is 'Default', hence the
+# strange mix of upper/lower case in the file name.
+
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+suffix = cn=example,cn=com
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
new file mode 100644
index 00000000..a4492a67
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
@@ -0,0 +1,5 @@
+# The domain-specific configuration file for the test domain
+# 'domain1' for use with unit tests.
+
+[identity]
+driver = keystone.identity.backends.sql.Identity \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf b/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
new file mode 100644
index 00000000..abcc43ba
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
@@ -0,0 +1,7 @@
+[auth]
+methods = external,password,token,simple_challenge_response,saml2,openid,x509
+simple_challenge_response = keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse
+saml2 = keystone.auth.plugins.mapped.Mapped
+openid = keystone.auth.plugins.mapped.Mapped
+x509 = keystone.auth.plugins.mapped.Mapped
+
diff --git a/keystone-moon/keystone/tests/unit/core.py b/keystone-moon/keystone/tests/unit/core.py
new file mode 100644
index 00000000..caca7dbd
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/core.py
@@ -0,0 +1,660 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import absolute_import
+import atexit
+import functools
+import logging
+import os
+import re
+import shutil
+import socket
+import sys
+import warnings
+
+import fixtures
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslo_log import log
+import oslotest.base as oslotest
+from oslotest import mockpatch
+import six
+from sqlalchemy import exc
+from testtools import testcase
+import webob
+
+# NOTE(ayoung)
+# environment.use_eventlet must run before any of the code that will
+# call the eventlet monkeypatching.
+from keystone.common import environment # noqa
+environment.use_eventlet()
+
+from keystone import auth
+from keystone.common import config as common_cfg
+from keystone.common import dependency
+from keystone.common import kvs
+from keystone.common.kvs import core as kvs_core
+from keystone import config
+from keystone import controllers
+from keystone import exception
+from keystone import notifications
+from keystone.policy.backends import rules
+from keystone.server import common
+from keystone import service
+from keystone.tests.unit import ksfixtures
+
+
+config.configure()
+
+LOG = log.getLogger(__name__)
+PID = six.text_type(os.getpid())
+TESTSDIR = os.path.dirname(os.path.abspath(__file__))
+TESTCONF = os.path.join(TESTSDIR, 'config_files')
+ROOTDIR = os.path.normpath(os.path.join(TESTSDIR, '..', '..', '..'))
+VENDOR = os.path.join(ROOTDIR, 'vendor')
+ETCDIR = os.path.join(ROOTDIR, 'etc')
+
+
+def _calc_tmpdir():
+ env_val = os.environ.get('KEYSTONE_TEST_TEMP_DIR')
+ if not env_val:
+ return os.path.join(TESTSDIR, 'tmp', PID)
+ return os.path.join(env_val, PID)
+
+
+TMPDIR = _calc_tmpdir()
+
+CONF = cfg.CONF
+log.register_options(CONF)
+rules.init()
+
+IN_MEM_DB_CONN_STRING = 'sqlite://'
+
+exception._FATAL_EXCEPTION_FORMAT_ERRORS = True
+os.makedirs(TMPDIR)
+atexit.register(shutil.rmtree, TMPDIR)
+
+
+class dirs(object):
+ @staticmethod
+ def root(*p):
+ return os.path.join(ROOTDIR, *p)
+
+ @staticmethod
+ def etc(*p):
+ return os.path.join(ETCDIR, *p)
+
+ @staticmethod
+ def tests(*p):
+ return os.path.join(TESTSDIR, *p)
+
+ @staticmethod
+ def tmp(*p):
+ return os.path.join(TMPDIR, *p)
+
+ @staticmethod
+ def tests_conf(*p):
+ return os.path.join(TESTCONF, *p)
+
+
+# Default database file for use with keystone.common.sql.initialize() in tests.
+DEFAULT_TEST_DB_FILE = dirs.tmp('test.db')
+
+
+@atexit.register
+def remove_test_databases():
+ db = dirs.tmp('test.db')
+ if os.path.exists(db):
+ os.unlink(db)
+ pristine = dirs.tmp('test.db.pristine')
+ if os.path.exists(pristine):
+ os.unlink(pristine)
+
+
+def generate_paste_config(extension_name):
+ # Generate a file, based on keystone-paste.ini, that is named:
+ # extension_name.ini, and includes extension_name in the pipeline
+ with open(dirs.etc('keystone-paste.ini'), 'r') as f:
+ contents = f.read()
+
+ new_contents = contents.replace(' service_v3',
+ ' %s service_v3' % (extension_name))
+
+ new_paste_file = dirs.tmp(extension_name + '.ini')
+ with open(new_paste_file, 'w') as f:
+ f.write(new_contents)
+
+ return new_paste_file
+
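+# Illustrative usage (hypothetical extension name):
+#     paste_file = generate_paste_config('example_ext')
+# writes 'example_ext.ini' under the test tmp dir, with 'example_ext'
+# inserted before service_v3 in each pipeline; pair it with
+# remove_generated_paste_config('example_ext') during cleanup.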
+
+def remove_generated_paste_config(extension_name):
+ # Remove the generated paste config file, named extension_name.ini
+ paste_file_to_remove = dirs.tmp(extension_name + '.ini')
+ os.remove(paste_file_to_remove)
+
+
+def skip_if_cache_disabled(*sections):
+ """This decorator is used to skip a test if caching is disabled either
+ globally or for the specific section.
+
+ In the code fragment::
+
+        @skip_if_cache_disabled('assignment', 'token')
+ def test_method(*args):
+ ...
+
+ The method test_method would be skipped if caching is disabled globally via
+ the `enabled` option in the `cache` section of the configuration or if
+ the `caching` option is set to false in either `assignment` or `token`
+ sections of the configuration. This decorator can be used with no
+ arguments to only check global caching.
+
+ If a specified configuration section does not define the `caching` option,
+ this decorator makes the same assumption as the `should_cache_fn` in
+ keystone.common.cache that caching should be enabled.
+ """
+ def wrapper(f):
+ @functools.wraps(f)
+ def inner(*args, **kwargs):
+ if not CONF.cache.enabled:
+ raise testcase.TestSkipped('Cache globally disabled.')
+ for s in sections:
+ conf_sec = getattr(CONF, s, None)
+ if conf_sec is not None:
+ if not getattr(conf_sec, 'caching', True):
+ raise testcase.TestSkipped('%s caching disabled.' % s)
+ return f(*args, **kwargs)
+ return inner
+ return wrapper
+
+
+def skip_if_no_multiple_domains_support(f):
+ """This decorator is used to skip a test if an identity driver
+ does not support multiple domains.
+ """
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ test_obj = args[0]
+ if not test_obj.identity_api.multiple_domains_supported:
+ raise testcase.TestSkipped('No multiple domains support')
+ return f(*args, **kwargs)
+ return wrapper
+
+
+class UnexpectedExit(Exception):
+ pass
+
+
+class BadLog(Exception):
+ """Raised on invalid call to logging (parameter mismatch)."""
+ pass
+
+
+class TestClient(object):
+ def __init__(self, app=None, token=None):
+ self.app = app
+ self.token = token
+
+ def request(self, method, path, headers=None, body=None):
+ if headers is None:
+ headers = {}
+
+ if self.token:
+ headers.setdefault('X-Auth-Token', self.token)
+
+ req = webob.Request.blank(path)
+ req.method = method
+ for k, v in six.iteritems(headers):
+ req.headers[k] = v
+ if body:
+ req.body = body
+ return req.get_response(self.app)
+
+ def get(self, path, headers=None):
+ return self.request('GET', path=path, headers=headers)
+
+ def post(self, path, headers=None, body=None):
+ return self.request('POST', path=path, headers=headers, body=body)
+
+ def put(self, path, headers=None, body=None):
+ return self.request('PUT', path=path, headers=headers, body=body)
+
+
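+# Illustrative usage of TestClient (a sketch; 'app' stands for any WSGI app
+# under test, e.g. the result of TestCase.loadapp() below):
+#     client = TestClient(app=app, token='ADMIN')
+#     resp = client.get('/v3/projects')
+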
+class BaseTestCase(oslotest.BaseTestCase):
+ """Light weight base test class.
+
+ This is a placeholder that will eventually go away once the
+ setup/teardown in TestCase is properly trimmed down to the bare
+ essentials. This is really just a play to speed up the tests by
+ eliminating unnecessary work.
+ """
+
+ def setUp(self):
+ super(BaseTestCase, self).setUp()
+ self.useFixture(mockpatch.PatchObject(sys, 'exit',
+ side_effect=UnexpectedExit))
+
+ def cleanup_instance(self, *names):
+ """Create a function suitable for use with self.addCleanup.
+
+ :returns: a callable that uses a closure to delete instance attributes
+
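+        Illustrative usage, mirroring setUp() in TestCase below::
+
+            self.addCleanup(self.cleanup_instance('config_fixture', 'logger'))
+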
+ """
+ def cleanup():
+ for name in names:
+ # TODO(dstanek): remove this 'if' statement once
+ # load_backend in test_backend_ldap is only called once
+ # per test
+ if hasattr(self, name):
+ delattr(self, name)
+ return cleanup
+
+
+@dependency.requires('revoke_api')
+class TestCase(BaseTestCase):
+
+ def config_files(self):
+ return []
+
+ def config_overrides(self):
+ signing_certfile = 'examples/pki/certs/signing_cert.pem'
+ signing_keyfile = 'examples/pki/private/signing_key.pem'
+ self.config_fixture.config(group='oslo_policy',
+ policy_file=dirs.etc('policy.json'))
+ self.config_fixture.config(
+ # TODO(morganfainberg): Make Cache Testing a separate test case
+ # in tempest, and move it out of the base unit tests.
+ group='cache',
+ backend='dogpile.cache.memory',
+ enabled=True,
+ proxies=['keystone.tests.unit.test_cache.CacheIsolatingProxy'])
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.templated.Catalog',
+ template_file=dirs.tests('default_catalog.templates'))
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.sql.Identity')
+ self.config_fixture.config(
+ group='kvs',
+ backends=[
+ ('keystone.tests.unit.test_kvs.'
+ 'KVSBackendForcedKeyMangleFixture'),
+ 'keystone.tests.unit.test_kvs.KVSBackendFixture'])
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(
+ group='signing', certfile=signing_certfile,
+ keyfile=signing_keyfile,
+ ca_certs='examples/pki/certs/cacert.pem')
+ self.config_fixture.config(
+ group='token',
+ driver='keystone.token.persistence.backends.kvs.Token')
+ self.config_fixture.config(
+ group='trust',
+ driver='keystone.trust.backends.sql.Trust')
+ self.config_fixture.config(
+ group='saml', certfile=signing_certfile, keyfile=signing_keyfile)
+ self.config_fixture.config(
+ default_log_levels=[
+ 'amqp=WARN',
+ 'amqplib=WARN',
+ 'boto=WARN',
+ 'qpid=WARN',
+ 'sqlalchemy=WARN',
+ 'suds=INFO',
+ 'oslo.messaging=INFO',
+ 'iso8601=WARN',
+ 'requests.packages.urllib3.connectionpool=WARN',
+ 'routes.middleware=INFO',
+ 'stevedore.extension=INFO',
+ 'keystone.notifications=INFO',
+ 'keystone.common._memcache_pool=INFO',
+ 'keystone.common.ldap=INFO',
+ ])
+ self.auth_plugin_config_override()
+
+ def auth_plugin_config_override(self, methods=None, **method_classes):
+ if methods is None:
+ methods = ['external', 'password', 'token', ]
+ if not method_classes:
+ method_classes = dict(
+ external='keystone.auth.plugins.external.DefaultDomain',
+ password='keystone.auth.plugins.password.Password',
+ token='keystone.auth.plugins.token.Token',
+ )
+ self.config_fixture.config(group='auth', methods=methods)
+ common_cfg.setup_authentication()
+ if method_classes:
+ self.config_fixture.config(group='auth', **method_classes)
+
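+    # Illustrative override (the plugin path comes from the defaults above;
+    # the reduced method list is hypothetical):
+    #     self.auth_plugin_config_override(
+    #         methods=['password'],
+    #         password='keystone.auth.plugins.password.Password')
+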
+ def setUp(self):
+ super(TestCase, self).setUp()
+ self.addCleanup(self.cleanup_instance('config_fixture', 'logger'))
+
+ self.addCleanup(CONF.reset)
+
+ self.useFixture(mockpatch.PatchObject(logging.Handler, 'handleError',
+ side_effect=BadLog))
+ self.config_fixture = self.useFixture(config_fixture.Config(CONF))
+ self.config(self.config_files())
+
+ # NOTE(morganfainberg): mock the auth plugin setup to use the config
+ # fixture which automatically unregisters options when performing
+ # cleanup.
+ def mocked_register_auth_plugin_opt(conf, opt):
+ self.config_fixture.register_opt(opt, group='auth')
+ self.register_auth_plugin_opt_patch = self.useFixture(
+ mockpatch.PatchObject(common_cfg, '_register_auth_plugin_opt',
+ new=mocked_register_auth_plugin_opt))
+
+ self.config_overrides()
+
+ self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+
+ # NOTE(morganfainberg): This code is a copy from the oslo-incubator
+ # log module. This is not in a function or otherwise available to use
+ # without having a CONF object to setup logging. This should help to
+ # reduce the log size by limiting what we log (similar to how Keystone
+ # would run under mod_wsgi or eventlet).
+ for pair in CONF.default_log_levels:
+ mod, _sep, level_name = pair.partition('=')
+ logger = logging.getLogger(mod)
+ logger.setLevel(level_name)
+
+ warnings.filterwarnings('error', category=DeprecationWarning,
+ module='^keystone\\.')
+ warnings.simplefilter('error', exc.SAWarning)
+ self.addCleanup(warnings.resetwarnings)
+
+ self.useFixture(ksfixtures.Cache())
+
+ # Clear the registry of providers so that providers from previous
+ # tests aren't used.
+ self.addCleanup(dependency.reset)
+
+ self.addCleanup(kvs.INMEMDB.clear)
+
+ # Ensure Notification subscriptions and resource types are empty
+ self.addCleanup(notifications.clear_subscribers)
+ self.addCleanup(notifications.reset_notifier)
+
+ # Reset the auth-plugin registry
+ self.addCleanup(self.clear_auth_plugin_registry)
+
+ self.addCleanup(setattr, controllers, '_VERSIONS', [])
+
+ def config(self, config_files):
+ CONF(args=[], project='keystone', default_config_files=config_files)
+
+ def load_backends(self):
+ """Initializes each manager and assigns them to an attribute."""
+
+ # TODO(blk-u): Shouldn't need to clear the registry here, but some
+ # tests call load_backends multiple times. These should be fixed to
+ # only call load_backends once.
+ dependency.reset()
+
+ # TODO(morganfainberg): Shouldn't need to clear the registry here, but
+ # some tests call load_backends multiple times. Since it is not
+ # possible to re-configure a backend, we need to clear the list. This
+ # should eventually be removed once testing has been cleaned up.
+ kvs_core.KEY_VALUE_STORE_REGISTRY.clear()
+
+ self.clear_auth_plugin_registry()
+ drivers, _unused = common.setup_backends(
+ load_extra_backends_fn=self.load_extra_backends)
+
+ for manager_name, manager in six.iteritems(drivers):
+ setattr(self, manager_name, manager)
+ self.addCleanup(self.cleanup_instance(*drivers.keys()))
+
+ def load_extra_backends(self):
+ """Override to load managers that aren't loaded by default.
+
+ This is useful to load managers initialized by extensions. No extra
+ backends are loaded by default.
+
+ :return: dict of name -> manager
+ """
+ return {}
+
+ def load_fixtures(self, fixtures):
+ """Hacky basic and naive fixture loading based on a python module.
+
+ Expects that the various APIs into the various services are already
+ defined on `self`.
+
+ """
+ # NOTE(dstanek): create a list of attribute names to be removed
+ # from this instance during cleanup
+ fixtures_to_cleanup = []
+
+ # TODO(termie): doing something from json, probably based on Django's
+ # loaddata will be much preferred.
+ if (hasattr(self, 'identity_api') and
+ hasattr(self, 'assignment_api') and
+ hasattr(self, 'resource_api')):
+ for domain in fixtures.DOMAINS:
+ try:
+ rv = self.resource_api.create_domain(domain['id'], domain)
+ except exception.Conflict:
+ rv = self.resource_api.get_domain(domain['id'])
+ except exception.NotImplemented:
+ rv = domain
+ attrname = 'domain_%s' % domain['id']
+ setattr(self, attrname, rv)
+ fixtures_to_cleanup.append(attrname)
+
+ for tenant in fixtures.TENANTS:
+ if hasattr(self, 'tenant_%s' % tenant['id']):
+ try:
+ # This will clear out any roles on the project as well
+ self.resource_api.delete_project(tenant['id'])
+ except exception.ProjectNotFound:
+ pass
+ rv = self.resource_api.create_project(
+ tenant['id'], tenant)
+
+ attrname = 'tenant_%s' % tenant['id']
+ setattr(self, attrname, rv)
+ fixtures_to_cleanup.append(attrname)
+
+ for role in fixtures.ROLES:
+ try:
+ rv = self.role_api.create_role(role['id'], role)
+ except exception.Conflict:
+ rv = self.role_api.get_role(role['id'])
+ attrname = 'role_%s' % role['id']
+ setattr(self, attrname, rv)
+ fixtures_to_cleanup.append(attrname)
+
+ for user in fixtures.USERS:
+ user_copy = user.copy()
+ tenants = user_copy.pop('tenants')
+ try:
+ existing_user = getattr(self, 'user_%s' % user['id'], None)
+ if existing_user is not None:
+ self.identity_api.delete_user(existing_user['id'])
+ except exception.UserNotFound:
+ pass
+
+ # For users, the manager layer will generate the ID
+ user_copy = self.identity_api.create_user(user_copy)
+ # Our tests expect that the password is still in the user
+ # record so that they can reference it, so put it back into
+ # the dict returned.
+ user_copy['password'] = user['password']
+
+ for tenant_id in tenants:
+ try:
+ self.assignment_api.add_user_to_project(
+ tenant_id, user_copy['id'])
+ except exception.Conflict:
+ pass
+ # Use the ID from the fixture as the attribute name, so
+ # that our tests can easily reference each user dict, while
+ # the ID in the dict will be the real public ID.
+ attrname = 'user_%s' % user['id']
+ setattr(self, attrname, user_copy)
+ fixtures_to_cleanup.append(attrname)
+
+ self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup))
+
+ def _paste_config(self, config):
+ if not config.startswith('config:'):
+ test_path = os.path.join(TESTSDIR, config)
+ etc_path = os.path.join(ROOTDIR, 'etc', config)
+ for path in [test_path, etc_path]:
+ if os.path.exists('%s-paste.ini' % path):
+ return 'config:%s-paste.ini' % path
+ return config
+
+ def loadapp(self, config, name='main'):
+ return service.loadapp(self._paste_config(config), name=name)
+
+ def clear_auth_plugin_registry(self):
+ auth.controllers.AUTH_METHODS.clear()
+ auth.controllers.AUTH_PLUGINS_LOADED = False
+
+ def assertCloseEnoughForGovernmentWork(self, a, b, delta=3):
+ """Asserts that two datetimes are nearly equal within a small delta.
+
+ :param delta: Maximum allowable time delta, defined in seconds.
+ """
+ msg = '%s != %s within %s delta' % (a, b, delta)
+
+ self.assertTrue(abs(a - b).seconds <= delta, msg)
+
+    def assertNotEmpty(self, lst):
+        self.assertTrue(len(lst))
+
+ def assertDictEqual(self, d1, d2, msg=None):
+ self.assertIsInstance(d1, dict)
+ self.assertIsInstance(d2, dict)
+ self.assertEqual(d1, d2, msg)
+
+ def assertRaisesRegexp(self, expected_exception, expected_regexp,
+ callable_obj, *args, **kwargs):
+ """Asserts that the message in a raised exception matches a regexp.
+ """
+ try:
+ callable_obj(*args, **kwargs)
+ except expected_exception as exc_value:
+ if isinstance(expected_regexp, six.string_types):
+ expected_regexp = re.compile(expected_regexp)
+
+ if isinstance(exc_value.args[0], unicode):
+ if not expected_regexp.search(unicode(exc_value)):
+ raise self.failureException(
+ '"%s" does not match "%s"' %
+ (expected_regexp.pattern, unicode(exc_value)))
+ else:
+ if not expected_regexp.search(str(exc_value)):
+ raise self.failureException(
+ '"%s" does not match "%s"' %
+ (expected_regexp.pattern, str(exc_value)))
+ else:
+ if hasattr(expected_exception, '__name__'):
+ excName = expected_exception.__name__
+ else:
+ excName = str(expected_exception)
+ raise self.failureException("%s not raised" % excName)
+
+ def assertDictContainsSubset(self, expected, actual, msg=None):
+ """Checks whether actual is a superset of expected."""
+
+ def safe_repr(obj, short=False):
+ _MAX_LENGTH = 80
+ try:
+ result = repr(obj)
+ except Exception:
+ result = object.__repr__(obj)
+ if not short or len(result) < _MAX_LENGTH:
+ return result
+ return result[:_MAX_LENGTH] + ' [truncated]...'
+
+ missing = []
+ mismatched = []
+ for key, value in six.iteritems(expected):
+ if key not in actual:
+ missing.append(key)
+ elif value != actual[key]:
+ mismatched.append('%s, expected: %s, actual: %s' %
+ (safe_repr(key), safe_repr(value),
+ safe_repr(actual[key])))
+
+ if not (missing or mismatched):
+ return
+
+ standardMsg = ''
+ if missing:
+ standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
+ missing)
+ if mismatched:
+ if standardMsg:
+ standardMsg += '; '
+ standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
+
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ @property
+ def ipv6_enabled(self):
+ if socket.has_ipv6:
+ sock = None
+ try:
+ sock = socket.socket(socket.AF_INET6)
+ # NOTE(Mouad): Try to bind to IPv6 loopback ip address.
+ sock.bind(("::1", 0))
+ return True
+ except socket.error:
+ pass
+ finally:
+ if sock:
+ sock.close()
+ return False
+
+ def skip_if_no_ipv6(self):
+ if not self.ipv6_enabled:
+ self.skipTest("IPv6 is not enabled in the system")
+
+ def skip_if_env_not_set(self, env_var):
+ if not os.environ.get(env_var):
+ self.skipTest('Env variable %s is not set.' % env_var)
+
+
+class SQLDriverOverrides(object):
+ """A mixin for consolidating sql-specific test overrides."""
+ def config_overrides(self):
+ super(SQLDriverOverrides, self).config_overrides()
+ # SQL specific driver overrides
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.sql.Catalog')
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.sql.Identity')
+ self.config_fixture.config(
+ group='policy',
+ driver='keystone.policy.backends.sql.Policy')
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.sql.Revoke')
+ self.config_fixture.config(
+ group='token',
+ driver='keystone.token.persistence.backends.sql.Token')
+ self.config_fixture.config(
+ group='trust',
+ driver='keystone.trust.backends.sql.Trust')
diff --git a/keystone-moon/keystone/tests/unit/default_catalog.templates b/keystone-moon/keystone/tests/unit/default_catalog.templates
new file mode 100644
index 00000000..faf87eb5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/default_catalog.templates
@@ -0,0 +1,14 @@
+# config for templated.Catalog, using camelCase because I don't want to do
+# translations for keystone compat
+catalog.RegionOne.identity.publicURL = http://localhost:$(public_port)s/v2.0
+catalog.RegionOne.identity.adminURL = http://localhost:$(admin_port)s/v2.0
+catalog.RegionOne.identity.internalURL = http://localhost:$(admin_port)s/v2.0
+catalog.RegionOne.identity.name = 'Identity Service'
+catalog.RegionOne.identity.id = 1
+
+# fake compute service for now to help novaclient tests work
+catalog.RegionOne.compute.publicURL = http://localhost:8774/v1.1/$(tenant_id)s
+catalog.RegionOne.compute.adminURL = http://localhost:8774/v1.1/$(tenant_id)s
+catalog.RegionOne.compute.internalURL = http://localhost:8774/v1.1/$(tenant_id)s
+catalog.RegionOne.compute.name = 'Compute Service'
+catalog.RegionOne.compute.id = 2
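+
+# Note (illustrative, not part of the original file): the templated catalog
+# driver substitutes the $(...)s placeholders when the catalog is rendered,
+# e.g. $(tenant_id)s comes from the request context.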
diff --git a/keystone-moon/keystone/tests/unit/default_fixtures.py b/keystone-moon/keystone/tests/unit/default_fixtures.py
new file mode 100644
index 00000000..f7e2064f
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/default_fixtures.py
@@ -0,0 +1,121 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# NOTE(dolph): please try to avoid additional fixtures if possible; test suite
+# performance may be negatively affected.
+
+DEFAULT_DOMAIN_ID = 'default'
+
+TENANTS = [
+ {
+ 'id': 'bar',
+ 'name': 'BAR',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'description': 'description',
+ 'enabled': True,
+ 'parent_id': None,
+ }, {
+ 'id': 'baz',
+ 'name': 'BAZ',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'description': 'description',
+ 'enabled': True,
+ 'parent_id': None,
+ }, {
+ 'id': 'mtu',
+ 'name': 'MTU',
+ 'description': 'description',
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'parent_id': None,
+ }, {
+ 'id': 'service',
+ 'name': 'service',
+ 'description': 'description',
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'parent_id': None,
+ }
+]
+
+# NOTE(ja): the keystone_admin role is created in setUp
+USERS = [
+ {
+ 'id': 'foo',
+ 'name': 'FOO',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'foo2',
+ 'tenants': ['bar'],
+ 'enabled': True,
+ 'email': 'foo@bar.com',
+ }, {
+ 'id': 'two',
+ 'name': 'TWO',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'two2',
+ 'enabled': True,
+ 'default_project_id': 'baz',
+ 'tenants': ['baz'],
+ 'email': 'two@three.com',
+ }, {
+ 'id': 'badguy',
+ 'name': 'BadGuy',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'bad',
+ 'enabled': False,
+ 'default_project_id': 'baz',
+ 'tenants': ['baz'],
+ 'email': 'bad@guy.com',
+ }, {
+ 'id': 'sna',
+ 'name': 'SNA',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'snafu',
+ 'enabled': True,
+ 'tenants': ['bar'],
+ 'email': 'sna@snl.coom',
+ }
+]
+
+ROLES = [
+ {
+ 'id': 'admin',
+ 'name': 'admin',
+ }, {
+ 'id': 'member',
+ 'name': 'Member',
+ }, {
+ 'id': '9fe2ff9ee4384b1894a90878d3e92bab',
+ 'name': '_member_',
+ }, {
+ 'id': 'other',
+ 'name': 'Other',
+ }, {
+ 'id': 'browser',
+ 'name': 'Browser',
+ }, {
+ 'id': 'writer',
+ 'name': 'Writer',
+ }, {
+ 'id': 'service',
+ 'name': 'Service',
+ }
+]
+
+DOMAINS = [{'description':
+ (u'Owns users and tenants (i.e. projects)'
+ ' available on Identity API v2.'),
+ 'enabled': True,
+ 'id': DEFAULT_DOMAIN_ID,
+ 'name': u'Default'}]
diff --git a/keystone-moon/keystone/tests/unit/fakeldap.py b/keystone-moon/keystone/tests/unit/fakeldap.py
new file mode 100644
index 00000000..85aaadfe
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/fakeldap.py
@@ -0,0 +1,602 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Fake LDAP server for test harness.
+
+This class does very little error checking, and knows nothing about ldap
+class definitions. It implements the minimum emulation of the python ldap
+library needed to work with keystone.
+
+"""
+
+import re
+import shelve
+
+import ldap
+from oslo_config import cfg
+from oslo_log import log
+import six
+from six import moves
+
+from keystone.common.ldap import core
+from keystone import exception
+
+
+SCOPE_NAMES = {
+ ldap.SCOPE_BASE: 'SCOPE_BASE',
+ ldap.SCOPE_ONELEVEL: 'SCOPE_ONELEVEL',
+ ldap.SCOPE_SUBTREE: 'SCOPE_SUBTREE',
+}
+
+# http://msdn.microsoft.com/en-us/library/windows/desktop/aa366991(v=vs.85).aspx # noqa
+CONTROL_TREEDELETE = '1.2.840.113556.1.4.805'
+
+LOG = log.getLogger(__name__)
+CONF = cfg.CONF
+
+
+def _internal_attr(attr_name, value_or_values):
+ def normalize_value(value):
+ return core.utf8_decode(value)
+
+ def normalize_dn(dn):
+ # Capitalize the attribute names as an LDAP server might.
+
+ # NOTE(blk-u): Special case for this tested value, used with
+ # test_user_id_comma. The call to str2dn here isn't always correct
+ # here, because `dn` is escaped for an LDAP filter. str2dn() normally
+ # works only because there's no special characters in `dn`.
+ if dn == 'cn=Doe\\5c, John,ou=Users,cn=example,cn=com':
+ return 'CN=Doe\\, John,OU=Users,CN=example,CN=com'
+
+ # NOTE(blk-u): Another special case for this tested value. When a
+ # roleOccupant has an escaped comma, it gets converted to \2C.
+ if dn == 'cn=Doe\\, John,ou=Users,cn=example,cn=com':
+ return 'CN=Doe\\2C John,OU=Users,CN=example,CN=com'
+
+ dn = ldap.dn.str2dn(core.utf8_encode(dn))
+ norm = []
+ for part in dn:
+ name, val, i = part[0]
+ name = core.utf8_decode(name)
+ name = name.upper()
+ name = core.utf8_encode(name)
+ norm.append([(name, val, i)])
+ return core.utf8_decode(ldap.dn.dn2str(norm))
+
+ if attr_name in ('member', 'roleOccupant'):
+ attr_fn = normalize_dn
+ else:
+ attr_fn = normalize_value
+
+ if isinstance(value_or_values, list):
+ return [attr_fn(x) for x in value_or_values]
+ return [attr_fn(value_or_values)]
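+
+# Example (illustrative): _internal_attr('member', 'cn=foo,ou=Users') returns
+# ['CN=foo,OU=Users'], since DN-valued attributes get their attribute names
+# upper-cased; any other attribute is simply decoded and wrapped in a list.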
+
+
+def _match_query(query, attrs):
+ """Match an ldap query to an attribute dictionary.
+
+ The characters &, |, and ! are supported in the query. No syntax checking
+ is performed, so malformed queries will not work correctly.
+ """
+ # cut off the parentheses
+ inner = query[1:-1]
+ if inner.startswith(('&', '|')):
+ if inner[0] == '&':
+ matchfn = all
+ else:
+ matchfn = any
+ # cut off the & or |
+ groups = _paren_groups(inner[1:])
+ return matchfn(_match_query(group, attrs) for group in groups)
+ if inner.startswith('!'):
+ # cut off the ! and the nested parentheses
+ return not _match_query(query[2:-1], attrs)
+
+ (k, _sep, v) = inner.partition('=')
+ return _match(k, v, attrs)
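+
+# Worked example (illustrative): with attrs = {'cn': ['foo'], 'enabled': ['TRUE']},
+# _match_query('(&(cn=foo)(enabled=TRUE))', attrs) is True because all()
+# requires both sub-filters to match; '(|(cn=bar)(cn=foo))' is True via any(),
+# and '(!(cn=foo))' negates to False.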
+
+
+def _paren_groups(source):
+ """Split a string into parenthesized groups."""
+ count = 0
+ start = 0
+ result = []
+ for pos in moves.range(len(source)):
+ if source[pos] == '(':
+ if count == 0:
+ start = pos
+ count += 1
+ if source[pos] == ')':
+ count -= 1
+ if count == 0:
+ result.append(source[start:pos + 1])
+ return result
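+
+# Example (illustrative): _paren_groups('(a=1)(b=2)') -> ['(a=1)', '(b=2)'],
+# while nested parentheses stay together: '(&(a=1)(b=2))' -> ['(&(a=1)(b=2))'].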
+
+
+def _match(key, value, attrs):
+ """Match a given key and value against an attribute list."""
+
+ def match_with_wildcards(norm_val, val_list):
+ # Case insensitive checking with wildcards
+ if norm_val.startswith('*'):
+ if norm_val.endswith('*'):
+ # Is the string anywhere in the target?
+ for x in val_list:
+ if norm_val[1:-1] in x:
+ return True
+ else:
+ # Is the string at the end of the target?
+ for x in val_list:
+ if (norm_val[1:] ==
+ x[len(x) - len(norm_val) + 1:]):
+ return True
+ elif norm_val.endswith('*'):
+ # Is the string at the start of the target?
+ for x in val_list:
+ if norm_val[:-1] == x[:len(norm_val) - 1]:
+ return True
+ else:
+ # Is the string an exact match?
+ for x in val_list:
+ if norm_val == x:
+ return True
+ return False
+
+ if key not in attrs:
+ return False
+ # This is a pure wild card search, so the answer must be yes!
+ if value == '*':
+ return True
+ if key == 'serviceId':
+ # For serviceId, the backend returns a list of numbers;
+ # convert them to strings before comparing.
+ str_sids = [six.text_type(x) for x in attrs[key]]
+ return six.text_type(value) in str_sids
+ if key != 'objectclass':
+ check_value = _internal_attr(key, value)[0].lower()
+ norm_values = list(
+ _internal_attr(key, x)[0].lower() for x in attrs[key])
+ return match_with_wildcards(check_value, norm_values)
+ # it is an objectclass check, so check subclasses
+ values = _subs(value)
+ for v in values:
+ if v in attrs[key]:
+ return True
+ return False
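+
+# Wildcard examples (illustrative): _match('cn', '*foo*', attrs) performs a
+# substring test, 'foo*' a prefix test, '*foo' a suffix test, and a bare '*'
+# always matches when the attribute is present; comparisons ignore case.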
+
+
+def _subs(value):
+ """Returns a list of subclass strings.
+
+ The strings represent the ldap objectclass plus any subclasses that
+ inherit from it. Fakeldap doesn't know about the ldap object structure,
+ so subclasses need to be defined manually in the dictionary below.
+
+ """
+ subs = {'groupOfNames': ['keystoneTenant',
+ 'keystoneRole',
+ 'keystoneTenantRole']}
+ if value in subs:
+ return [value] + subs[value]
+ return [value]
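+
+# Example (illustrative): _subs('groupOfNames') returns ['groupOfNames',
+# 'keystoneTenant', 'keystoneRole', 'keystoneTenantRole']; any other
+# objectclass is returned as a single-element list.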
+
+
+server_fail = False
+
+
+class FakeShelve(dict):
+
+ def sync(self):
+ pass
+
+
+FakeShelves = {}
+
+
+class FakeLdap(core.LDAPHandler):
+ '''Emulate the python-ldap API.
+
+ The python-ldap API requires all strings to be UTF-8 encoded. This
+ is assured by the caller of this interface
+ (i.e. KeystoneLDAPHandler).
+
+ However, internally this emulation MUST process and store strings
+ in a canonical form which permits operations on
+ characters. Encoded strings do not provide the ability to operate
+ on characters. Therefore this emulation accepts UTF-8 encoded
+ strings, decodes them to unicode for operations internal to this
+ emulation, and encodes them back to UTF-8 when returning values
+ from the emulation.
+ '''
+
+ __prefix = 'ldap:'
+
+ def __init__(self, conn=None):
+ super(FakeLdap, self).__init__(conn=conn)
+ self._ldap_options = {ldap.OPT_DEREF: ldap.DEREF_NEVER}
+
+ def connect(self, url, page_size=0, alias_dereferencing=None,
+ use_tls=False, tls_cacertfile=None, tls_cacertdir=None,
+ tls_req_cert='demand', chase_referrals=None, debug_level=None,
+ use_pool=None, pool_size=None, pool_retry_max=None,
+ pool_retry_delay=None, pool_conn_timeout=None,
+ pool_conn_lifetime=None):
+ if url.startswith('fake://memory'):
+ if url not in FakeShelves:
+ FakeShelves[url] = FakeShelve()
+ self.db = FakeShelves[url]
+ else:
+ self.db = shelve.open(url[7:])
+
+ using_ldaps = url.lower().startswith("ldaps")
+
+ if use_tls and using_ldaps:
+ raise AssertionError('Invalid TLS / LDAPS combination')
+
+ if use_tls:
+ if tls_cacertfile:
+ ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile)
+ elif tls_cacertdir:
+ ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
+ if tls_req_cert in core.LDAP_TLS_CERTS.values():
+ ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
+ else:
+ raise ValueError("invalid TLS_REQUIRE_CERT tls_req_cert=%s",
+ tls_req_cert)
+
+ if alias_dereferencing is not None:
+ self.set_option(ldap.OPT_DEREF, alias_dereferencing)
+ self.page_size = page_size
+
+ self.use_pool = use_pool
+ self.pool_size = pool_size
+ self.pool_retry_max = pool_retry_max
+ self.pool_retry_delay = pool_retry_delay
+ self.pool_conn_timeout = pool_conn_timeout
+ self.pool_conn_lifetime = pool_conn_lifetime
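+
+ # Usage sketch (illustrative): FakeLdap().connect('fake://memory') backs
+ # self.db with a module-level FakeShelve shared across handlers; any other
+ # fake:// URL is opened via shelve with the scheme prefix stripped.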
+
+ def dn(self, dn):
+ return core.utf8_decode(dn)
+
+ def _dn_to_id_attr(self, dn):
+ return core.utf8_decode(ldap.dn.str2dn(core.utf8_encode(dn))[0][0][0])
+
+ def _dn_to_id_value(self, dn):
+ return core.utf8_decode(ldap.dn.str2dn(core.utf8_encode(dn))[0][0][1])
+
+ def key(self, dn):
+ return '%s%s' % (self.__prefix, self.dn(dn))
+
+ def simple_bind_s(self, who='', cred='',
+ serverctrls=None, clientctrls=None):
+ """This method is ignored, but provided for compatibility."""
+ if server_fail:
+ raise ldap.SERVER_DOWN
+ whos = ['cn=Admin', CONF.ldap.user]
+ if who in whos and cred in ['password', CONF.ldap.password]:
+ return
+
+ try:
+ attrs = self.db[self.key(who)]
+ except KeyError:
+ LOG.debug('bind fail: who=%s not found', core.utf8_decode(who))
+ raise ldap.NO_SUCH_OBJECT
+
+ db_password = None
+ try:
+ db_password = attrs['userPassword'][0]
+ except (KeyError, IndexError):
+ LOG.debug('bind fail: password for who=%s not found',
+ core.utf8_decode(who))
+ raise ldap.INAPPROPRIATE_AUTH
+
+ if cred != db_password:
+ LOG.debug('bind fail: password for who=%s does not match',
+ core.utf8_decode(who))
+ raise ldap.INVALID_CREDENTIALS
+
+ def unbind_s(self):
+ """This method is ignored, but provided for compatibility."""
+ if server_fail:
+ raise ldap.SERVER_DOWN
+
+ def add_s(self, dn, modlist):
+ """Add an object with the specified attributes at dn."""
+ if server_fail:
+ raise ldap.SERVER_DOWN
+
+ id_attr_in_modlist = False
+ id_attr = self._dn_to_id_attr(dn)
+ id_value = self._dn_to_id_value(dn)
+
+ # The LDAP API raises a TypeError if attr name is None.
+ for k, dummy_v in modlist:
+ if k is None:
+ raise TypeError('must be string, not None. modlist=%s' %
+ modlist)
+
+ if k == id_attr:
+ for val in dummy_v:
+ if core.utf8_decode(val) == id_value:
+ id_attr_in_modlist = True
+
+ if not id_attr_in_modlist:
+ LOG.debug('id_attribute=%(attr)s missing, attributes=%(attrs)s',
+ {'attr': id_attr, 'attrs': modlist})
+ raise ldap.NAMING_VIOLATION
+ key = self.key(dn)
+ LOG.debug('add item: dn=%(dn)s, attrs=%(attrs)s', {
+ 'dn': core.utf8_decode(dn), 'attrs': modlist})
+ if key in self.db:
+ LOG.debug('add item failed: dn=%s is already in store.',
+ core.utf8_decode(dn))
+ raise ldap.ALREADY_EXISTS(dn)
+
+ self.db[key] = {k: _internal_attr(k, v) for k, v in modlist}
+ self.db.sync()
+
+ def delete_s(self, dn):
+ """Remove the ldap object at specified dn."""
+ return self.delete_ext_s(dn, serverctrls=[])
+
+ def _getChildren(self, dn):
+ return [k for k, v in six.iteritems(self.db)
+ if re.match('%s.*,%s' % (
+ re.escape(self.__prefix),
+ re.escape(self.dn(dn))), k)]
+
+ def delete_ext_s(self, dn, serverctrls, clientctrls=None):
+ """Remove the ldap object at specified dn."""
+ if server_fail:
+ raise ldap.SERVER_DOWN
+
+ try:
+ if CONTROL_TREEDELETE in [c.controlType for c in serverctrls]:
+ LOG.debug('FakeLdap subtree_delete item: dn=%s',
+ core.utf8_decode(dn))
+ children = self._getChildren(dn)
+ for c in children:
+ del self.db[c]
+
+ key = self.key(dn)
+ LOG.debug('FakeLdap delete item: dn=%s', core.utf8_decode(dn))
+ del self.db[key]
+ except KeyError:
+ LOG.debug('delete item failed: dn=%s not found.',
+ core.utf8_decode(dn))
+ raise ldap.NO_SUCH_OBJECT
+ self.db.sync()
+
+ def modify_s(self, dn, modlist):
+ """Modify the object at dn using the attribute list.
+
+ :param dn: an LDAP DN
+ :param modlist: a list of tuples in the following form:
+ ([MOD_ADD | MOD_DELETE | MOD_REPLACE], attribute, value)
+ """
+ if server_fail:
+ raise ldap.SERVER_DOWN
+
+ key = self.key(dn)
+ LOG.debug('modify item: dn=%(dn)s attrs=%(attrs)s', {
+ 'dn': core.utf8_decode(dn), 'attrs': modlist})
+ try:
+ entry = self.db[key]
+ except KeyError:
+ LOG.debug('modify item failed: dn=%s not found.',
+ core.utf8_decode(dn))
+ raise ldap.NO_SUCH_OBJECT
+
+ for cmd, k, v in modlist:
+ values = entry.setdefault(k, [])
+ if cmd == ldap.MOD_ADD:
+ v = _internal_attr(k, v)
+ for x in v:
+ if x in values:
+ raise ldap.TYPE_OR_VALUE_EXISTS
+ values += v
+ elif cmd == ldap.MOD_REPLACE:
+ values[:] = _internal_attr(k, v)
+ elif cmd == ldap.MOD_DELETE:
+ if v is None:
+ if not values:
+ LOG.debug('modify item failed: '
+ 'item has no attribute "%s" to delete', k)
+ raise ldap.NO_SUCH_ATTRIBUTE
+ values[:] = []
+ else:
+ for val in _internal_attr(k, v):
+ try:
+ values.remove(val)
+ except ValueError:
+ LOG.debug('modify item failed: '
+ 'item has no attribute "%(k)s" with '
+ 'value "%(v)s" to delete', {
+ 'k': k, 'v': val})
+ raise ldap.NO_SUCH_ATTRIBUTE
+ else:
+ LOG.debug('modify item failed: unknown command %s', cmd)
+ raise NotImplementedError('modify_s action %s not'
+ ' implemented' % cmd)
+ self.db[key] = entry
+ self.db.sync()
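+
+ # Illustrative modlist (not from the original source): passing
+ # [(ldap.MOD_REPLACE, 'description', ['new'])] replaces the attribute's
+ # values, while (ldap.MOD_DELETE, 'description', None) clears them all
+ # (raising NO_SUCH_ATTRIBUTE if the attribute is already empty).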
+
+ def search_s(self, base, scope,
+ filterstr='(objectClass=*)', attrlist=None, attrsonly=0):
+ """Search for all matching objects under base using the query.
+
+ Args:
+ base -- dn to search under
+ scope -- search scope (base, subtree, onelevel)
+ filterstr -- LDAP filter string to match objects against
+ attrlist -- attrs to return. Returns all attrs if not specified
+
+ """
+ if server_fail:
+ raise ldap.SERVER_DOWN
+
+ if scope == ldap.SCOPE_BASE:
+ try:
+ item_dict = self.db[self.key(base)]
+ except KeyError:
+ LOG.debug('search fail: dn not found for SCOPE_BASE')
+ raise ldap.NO_SUCH_OBJECT
+ results = [(base, item_dict)]
+ elif scope == ldap.SCOPE_SUBTREE:
+ # FIXME - LDAP search with SUBTREE scope must return the base
+ # entry, but the code below does _not_. Unfortunately, there are
+ # several tests that depend on this broken behavior, and fail
+ # when the base entry is returned in the search results. The
+ # fix is easy here, just initialize results as above for
+ # the SCOPE_BASE case.
+ # https://bugs.launchpad.net/keystone/+bug/1368772
+ try:
+ item_dict = self.db[self.key(base)]
+ except KeyError:
+ LOG.debug('search fail: dn not found for SCOPE_SUBTREE')
+ raise ldap.NO_SUCH_OBJECT
+ results = [(base, item_dict)]
+ extraresults = [(k[len(self.__prefix):], v)
+ for k, v in six.iteritems(self.db)
+ if re.match('%s.*,%s' %
+ (re.escape(self.__prefix),
+ re.escape(self.dn(base))), k)]
+ results.extend(extraresults)
+ elif scope == ldap.SCOPE_ONELEVEL:
+
+ def get_entries():
+ base_dn = ldap.dn.str2dn(core.utf8_encode(base))
+ base_len = len(base_dn)
+
+ for k, v in six.iteritems(self.db):
+ if not k.startswith(self.__prefix):
+ continue
+ k_dn_str = k[len(self.__prefix):]
+ k_dn = ldap.dn.str2dn(core.utf8_encode(k_dn_str))
+ if len(k_dn) != base_len + 1:
+ continue
+ if k_dn[-base_len:] != base_dn:
+ continue
+ yield (k_dn_str, v)
+
+ results = list(get_entries())
+
+ else:
+ # openldap client/server raises PROTOCOL_ERROR for unexpected scope
+ raise ldap.PROTOCOL_ERROR
+
+ objects = []
+ for dn, attrs in results:
+ # filter the objects by filterstr
+ id_attr, id_val, _ = ldap.dn.str2dn(core.utf8_encode(dn))[0][0]
+ id_attr = core.utf8_decode(id_attr)
+ id_val = core.utf8_decode(id_val)
+ match_attrs = attrs.copy()
+ match_attrs[id_attr] = [id_val]
+ if not filterstr or _match_query(filterstr, match_attrs):
+ # filter the attributes by attrlist
+ attrs = {k: v for k, v in six.iteritems(attrs)
+ if not attrlist or k in attrlist}
+ objects.append((dn, attrs))
+
+ return objects
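+
+ # Example call (illustrative): search_s('ou=Users,cn=example,cn=com',
+ # ldap.SCOPE_ONELEVEL) returns (dn, attrs) tuples for the immediate
+ # children of the base entry only; SCOPE_BASE returns the entry itself.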
+
+ def set_option(self, option, invalue):
+ self._ldap_options[option] = invalue
+
+ def get_option(self, option):
+ value = self._ldap_options.get(option, None)
+ return value
+
+ def search_ext(self, base, scope,
+ filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
+ serverctrls=None, clientctrls=None,
+ timeout=-1, sizelimit=0):
+ raise exception.NotImplemented()
+
+ def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None,
+ resp_ctrl_classes=None):
+ raise exception.NotImplemented()
+
+
+class FakeLdapPool(FakeLdap):
+ '''Emulate the python-ldap API with pooled connections, reusing the
+ existing FakeLdap logic.
+
+ This class is used as the connector class in PooledLDAPHandler.
+ '''
+
+ def __init__(self, uri, retry_max=None, retry_delay=None, conn=None):
+ super(FakeLdapPool, self).__init__(conn=conn)
+ self.url = uri
+ self.connected = None
+ self.conn = self
+ self._connection_time = 5 # any number greater than 0
+
+ def get_lifetime(self):
+ return self._connection_time
+
+ def simple_bind_s(self, who=None, cred=None,
+ serverctrls=None, clientctrls=None):
+ if self.url.startswith('fakepool://memory'):
+ if self.url not in FakeShelves:
+ FakeShelves[self.url] = FakeShelve()
+ self.db = FakeShelves[self.url]
+ else:
+ self.db = shelve.open(self.url[11:])
+
+ if not who:
+ who = 'cn=Admin'
+ if not cred:
+ cred = 'password'
+
+ super(FakeLdapPool, self).simple_bind_s(who=who, cred=cred,
+ serverctrls=serverctrls,
+ clientctrls=clientctrls)
+
+ def unbind_ext_s(self):
+ '''Added to extend FakeLdap as a connector class; intentionally a no-op.'''
+ pass
+
+
+class FakeLdapNoSubtreeDelete(FakeLdap):
+ """FakeLdap subclass that does not support subtree delete
+
+ Same as FakeLdap except delete will throw the LDAP error
+ ldap.NOT_ALLOWED_ON_NONLEAF if there is an attempt to delete
+ an entry that has children.
+ """
+
+ def delete_ext_s(self, dn, serverctrls, clientctrls=None):
+ """Remove the ldap object at specified dn."""
+ if server_fail:
+ raise ldap.SERVER_DOWN
+
+ try:
+ children = self._getChildren(dn)
+ if children:
+ raise ldap.NOT_ALLOWED_ON_NONLEAF
+
+ except KeyError:
+ LOG.debug('delete item failed: dn=%s not found.',
+ core.utf8_decode(dn))
+ raise ldap.NO_SUCH_OBJECT
+ super(FakeLdapNoSubtreeDelete, self).delete_ext_s(dn,
+ serverctrls,
+ clientctrls)
diff --git a/keystone-moon/keystone/tests/unit/federation_fixtures.py b/keystone-moon/keystone/tests/unit/federation_fixtures.py
new file mode 100644
index 00000000..d4527d9c
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/federation_fixtures.py
@@ -0,0 +1,28 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+IDP_ENTITY_ID = 'https://localhost/v3/OS-FEDERATION/saml2/idp'
+IDP_SSO_ENDPOINT = 'https://localhost/v3/OS-FEDERATION/saml2/SSO'
+
+# Organization info
+IDP_ORGANIZATION_NAME = 'ACME INC'
+IDP_ORGANIZATION_DISPLAY_NAME = 'ACME'
+IDP_ORGANIZATION_URL = 'https://acme.example.com'
+
+# Contact info
+IDP_CONTACT_COMPANY = 'ACME Sub'
+IDP_CONTACT_GIVEN_NAME = 'Joe'
+IDP_CONTACT_SURNAME = 'Hacker'
+IDP_CONTACT_EMAIL = 'joe@acme.example.com'
+IDP_CONTACT_TELEPHONE_NUMBER = '1234567890'
+IDP_CONTACT_TYPE = 'technical'
diff --git a/keystone-moon/keystone/tests/unit/filtering.py b/keystone-moon/keystone/tests/unit/filtering.py
new file mode 100644
index 00000000..1a31a23f
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/filtering.py
@@ -0,0 +1,96 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+
+
+CONF = cfg.CONF
+
+
+class FilterTests(object):
+
+ # Provide support for checking if a batch of list items all
+ # exist within a contiguous range in a total list
+ def _match_with_list(self, this_batch, total_list,
+ batch_size=None,
+ list_start=None, list_end=None):
+ if batch_size is None:
+ batch_size = len(this_batch)
+ if list_start is None:
+ list_start = 0
+ if list_end is None:
+ list_end = len(total_list)
+ for batch_item in range(0, batch_size):
+ found = False
+ for list_item in range(list_start, list_end):
+ if this_batch[batch_item]['id'] == total_list[list_item]['id']:
+ found = True
+ self.assertTrue(found)
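+
+ # Illustrative usage: _match_with_list(page, all_entities, list_start=10,
+ # list_end=20) asserts that every item in `page` appears, matched by 'id',
+ # somewhere within positions 10..19 of the full list.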
+
+ def _create_entity(self, entity_type):
+ f = getattr(self.identity_api, 'create_%s' % entity_type, None)
+ if f is None:
+ f = getattr(self.assignment_api, 'create_%s' % entity_type)
+ return f
+
+ def _delete_entity(self, entity_type):
+ f = getattr(self.identity_api, 'delete_%s' % entity_type, None)
+ if f is None:
+ f = getattr(self.assignment_api, 'delete_%s' % entity_type)
+ return f
+
+ def _list_entities(self, entity_type):
+ f = getattr(self.identity_api, 'list_%ss' % entity_type, None)
+ if f is None:
+ f = getattr(self.assignment_api, 'list_%ss' % entity_type)
+ return f
+
+ def _create_one_entity(self, entity_type, domain_id, name):
+ new_entity = {'name': name,
+ 'domain_id': domain_id}
+ if entity_type in ['user', 'group']:
+ # The manager layer creates the ID for users and groups
+ new_entity = self._create_entity(entity_type)(new_entity)
+ else:
+ new_entity['id'] = '0000' + uuid.uuid4().hex
+ self._create_entity(entity_type)(new_entity['id'], new_entity)
+ return new_entity
+
+ def _create_test_data(self, entity_type, number, domain_id=None,
+ name_dict=None):
+ """Create entity test data
+
+ :param entity_type: type of entity to create, e.g. 'user', group' etc.
+ :param number: number of entities to create,
+ :param domain_id: if not defined, all users will be created in the
+ default domain.
+ :param name_dict: optional dict containing entity number and name pairs
+
+ """
+ entity_list = []
+ if domain_id is None:
+ domain_id = CONF.identity.default_domain_id
+ name_dict = name_dict or {}
+ for x in range(number):
+ # If this index has a name defined in the name_dict, then use it
+ name = name_dict.get(x, uuid.uuid4().hex)
+ new_entity = self._create_one_entity(entity_type, domain_id, name)
+ entity_list.append(new_entity)
+ return entity_list
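+
+ # Illustrative usage: self._create_test_data('user', 3, name_dict={0: 'adam'})
+ # creates three users in the default domain, naming the first 'adam' and
+ # the others with random uuid hex names.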
+
+ def _delete_test_data(self, entity_type, entity_list):
+ for entity in entity_list:
+ self._delete_entity(entity_type)(entity['id'])
diff --git a/keystone-moon/keystone/tests/unit/identity/__init__.py b/keystone-moon/keystone/tests/unit/identity/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/identity/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/identity/test_core.py b/keystone-moon/keystone/tests/unit/identity/test_core.py
new file mode 100644
index 00000000..6c8faebb
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/identity/test_core.py
@@ -0,0 +1,125 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Unit tests for core identity behavior."""
+
+import os
+import uuid
+
+import mock
+from oslo_config import cfg
+
+from keystone import exception
+from keystone import identity
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+
+
+class TestDomainConfigs(tests.BaseTestCase):
+
+ def setUp(self):
+ super(TestDomainConfigs, self).setUp()
+ self.addCleanup(CONF.reset)
+
+ self.tmp_dir = tests.dirs.tmp()
+ CONF.set_override('domain_config_dir', self.tmp_dir, 'identity')
+
+ def test_config_for_nonexistent_domain(self):
+ """Having a config for a non-existent domain will be ignored.
+
+ There are no assertions in this test because there are no side
+ effects. If there is a config file for a domain that does not
+ exist it should be ignored.
+
+ """
+ domain_id = uuid.uuid4().hex
+ domain_config_filename = os.path.join(self.tmp_dir,
+ 'keystone.%s.conf' % domain_id)
+ self.addCleanup(os.remove, domain_config_filename)
+ with open(domain_config_filename, 'w'):
+ """Write an empty config file."""
+
+ e = exception.DomainNotFound(domain_id=domain_id)
+ mock_assignment_api = mock.Mock()
+ mock_assignment_api.get_domain_by_name.side_effect = e
+
+ domain_config = identity.DomainConfigs()
+ fake_standard_driver = None
+ domain_config.setup_domain_drivers(fake_standard_driver,
+ mock_assignment_api)
+
+ def test_config_for_dot_name_domain(self):
+ # Ensure we can extract the right domain name from a filename
+ # that contains dots.
+ domain_config_filename = os.path.join(self.tmp_dir,
+ 'keystone.abc.def.com.conf')
+ with open(domain_config_filename, 'w'):
+ """Write an empty config file."""
+ self.addCleanup(os.remove, domain_config_filename)
+
+ with mock.patch.object(identity.DomainConfigs,
+ '_load_config_from_file') as mock_load_config:
+ domain_config = identity.DomainConfigs()
+ fake_assignment_api = None
+ fake_standard_driver = None
+ domain_config.setup_domain_drivers(fake_standard_driver,
+ fake_assignment_api)
+ mock_load_config.assert_called_once_with(fake_assignment_api,
+ [domain_config_filename],
+ 'abc.def.com')
+
+
+class TestDatabaseDomainConfigs(tests.TestCase):
+
+ def setUp(self):
+ super(TestDatabaseDomainConfigs, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+
+ def test_domain_config_in_database_disabled_by_default(self):
+ self.assertFalse(CONF.identity.domain_configurations_from_database)
+
+ def test_loading_config_from_database(self):
+ CONF.set_override('domain_configurations_from_database', True,
+ 'identity')
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ # Override two config options for our domain
+ conf = {'ldap': {'url': uuid.uuid4().hex,
+ 'suffix': uuid.uuid4().hex},
+ 'identity': {
+ 'driver': 'keystone.identity.backends.ldap.Identity'}}
+ self.domain_config_api.create_config(domain['id'], conf)
+ fake_standard_driver = None
+ domain_config = identity.DomainConfigs()
+ domain_config.setup_domain_drivers(fake_standard_driver,
+ self.resource_api)
+ # Make sure our two overrides are in place, and others are not affected
+ res = domain_config.get_domain_conf(domain['id'])
+ self.assertEqual(conf['ldap']['url'], res.ldap.url)
+ self.assertEqual(conf['ldap']['suffix'], res.ldap.suffix)
+ self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope)
+
+ # Now turn off using database domain configuration and check that the
+ # default config file values are now seen instead of the overrides.
+ CONF.set_override('domain_configurations_from_database', False,
+ 'identity')
+ domain_config = identity.DomainConfigs()
+ domain_config.setup_domain_drivers(fake_standard_driver,
+ self.resource_api)
+ res = domain_config.get_domain_conf(domain['id'])
+ self.assertEqual(CONF.ldap.url, res.ldap.url)
+ self.assertEqual(CONF.ldap.suffix, res.ldap.suffix)
+ self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope)
diff --git a/keystone-moon/keystone/tests/unit/identity_mapping.py b/keystone-moon/keystone/tests/unit/identity_mapping.py
new file mode 100644
index 00000000..7fb8063f
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/identity_mapping.py
@@ -0,0 +1,23 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from keystone.common import sql
+from keystone.identity.mapping_backends import sql as mapping_sql
+
+
+def list_id_mappings():
+ """List all id_mappings for testing purposes."""
+
+ a_session = sql.get_session()
+ refs = a_session.query(mapping_sql.IDMapping).all()
+ return [x.to_dict() for x in refs]
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py b/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
new file mode 100644
index 00000000..81b80298
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
@@ -0,0 +1,15 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from keystone.tests.unit.ksfixtures.cache import Cache # noqa
+from keystone.tests.unit.ksfixtures.key_repository import KeyRepository # noqa
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py b/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
new file mode 100644
index 00000000..ea1e6255
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
@@ -0,0 +1,79 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+import fixtures
+from oslo_config import cfg
+from paste import deploy
+
+from keystone.common import environment
+
+
+CONF = cfg.CONF
+
+MAIN = 'main'
+ADMIN = 'admin'
+
+
+class AppServer(fixtures.Fixture):
+ """A fixture for managing an application server instance.
+ """
+
+ def __init__(self, config, name, cert=None, key=None, ca=None,
+ cert_required=False, host='127.0.0.1', port=0):
+ super(AppServer, self).__init__()
+ self.config = config
+ self.name = name
+ self.cert = cert
+ self.key = key
+ self.ca = ca
+ self.cert_required = cert_required
+ self.host = host
+ self.port = port
+
+ def setUp(self):
+ super(AppServer, self).setUp()
+
+ app = deploy.loadapp(self.config, name=self.name)
+ self.server = environment.Server(app, self.host, self.port)
+ self._setup_SSL_if_requested()
+ self.server.start(key='socket')
+
+ # some tests need to know the port we ran on.
+ self.port = self.server.socket_info['socket'][1]
+ self._update_config_opt()
+
+ self.addCleanup(self.server.stop)
+
+ def _setup_SSL_if_requested(self):
+ # TODO(dstanek): fix environment.Server to take a SSLOpts instance
+ # so that the params are either always set or not
+ if (self.cert is not None and
+ self.ca is not None and
+ self.key is not None):
+ self.server.set_ssl(certfile=self.cert,
+ keyfile=self.key,
+ ca_certs=self.ca,
+ cert_required=self.cert_required)
+
+ def _update_config_opt(self):
+ """Updates the config with the actual port used."""
+ opt_name = self._get_config_option_for_section_name()
+ CONF.set_override(opt_name, self.port, group='eventlet_server')
+
+ def _get_config_option_for_section_name(self):
+ """Maps Paster config section names to port option names."""
+ return {'admin': 'admin_port', 'main': 'public_port'}[self.name]
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py b/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
new file mode 100644
index 00000000..74566f1e
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
@@ -0,0 +1,36 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fixtures
+
+from keystone.common import cache
+
+
+class Cache(fixtures.Fixture):
+ """A fixture for setting up and tearing down the cache between test cases.
+ """
+
+ def setUp(self):
+ super(Cache, self).setUp()
+
+ # NOTE(dstanek): We must remove the existing cache backend in the
+ # setUp instead of the tearDown because it defaults to a no-op cache
+ # and we want the configure call below to create the correct backend.
+
+ # NOTE(morganfainberg): The only way to reconfigure the CacheRegion
+ # object on each setUp() call is to remove the .backend property.
+ if cache.REGION.is_configured:
+ del cache.REGION.backend
+
+ # ensure the cache region instance is setup
+ cache.configure_cache_region(cache.REGION)
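+
+ # Illustrative usage: a test case opts in with something like
+ # self.useFixture(Cache()) so every test starts from a freshly
+ # configured cache region rather than a stale backend.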
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/database.py b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
new file mode 100644
index 00000000..15597539
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
@@ -0,0 +1,124 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import os
+import shutil
+
+import fixtures
+from oslo_config import cfg
+from oslo_db import options as db_options
+from oslo_db.sqlalchemy import migration
+
+from keystone.common import sql
+from keystone.common.sql import migration_helpers
+from keystone.tests import unit as tests
+
+
+CONF = cfg.CONF
+
+
+def run_once(f):
+ """A decorator to ensure the decorated function is only executed once.
+
+ The decorated function must not take any arguments.
+ """
+ @functools.wraps(f)
+ def wrapper():
+ if not wrapper.already_ran:
+ f()
+ wrapper.already_ran = True
+ wrapper.already_ran = False
+ return wrapper
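+
+# Illustrative usage (not from the original source):
+#
+# @run_once
+# def seed_database():
+# ...  # runs on the first call; every later call is a no-op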
+
+
+def _setup_database(extensions=None):
+ if CONF.database.connection != tests.IN_MEM_DB_CONN_STRING:
+ db = tests.dirs.tmp('test.db')
+ pristine = tests.dirs.tmp('test.db.pristine')
+
+ if os.path.exists(db):
+ os.unlink(db)
+ if not os.path.exists(pristine):
+ migration.db_sync(sql.get_engine(),
+ migration_helpers.find_migrate_repo())
+ for extension in (extensions or []):
+ migration_helpers.sync_database_to_version(extension=extension)
+ shutil.copyfile(db, pristine)
+ else:
+ shutil.copyfile(pristine, db)
+
+
+# NOTE(I159): All the options are cleared on every execution, so this method
+# must be called at every fixture initialization.
+def initialize_sql_session():
+ # Make sure the DB is located in the correct place; set the default
+ # value here, as some test cases need to be able to override it.
+ db_options.set_defaults(
+ CONF,
+ connection=tests.IN_MEM_DB_CONN_STRING)
+
+
+@run_once
+def _load_sqlalchemy_models():
+ """Find all modules containing SQLAlchemy models and import them.
+
+ This creates more consistent, deterministic test runs because tables
+ for all core and extension models are always created in the test
+ database. We ensure this by importing all modules that contain model
+ definitions.
+
+ The database schema during test runs is created using reflection.
+ Reflection is simply SQLAlchemy taking the model definitions for
+ all models currently imported and making tables for each of them.
+ The database schema created during test runs may vary between tests
+ as more models are imported. Importing all models at the start of
+ the test run avoids this problem.
+
+ """
+ keystone_root = os.path.normpath(os.path.join(
+ os.path.dirname(__file__), '..', '..', '..'))
+ for root, dirs, files in os.walk(keystone_root):
+ # NOTE(morganfainberg): Slice the keystone_root off the root to ensure
+ # we do not end up with a module name like:
+ # Users.home.openstack.keystone.assignment.backends.sql
+ root = root[len(keystone_root):]
+ if root.endswith('backends') and 'sql.py' in files:
+ # The root will be prefixed with an instance of os.sep, which will
+ # make the root after replacement '.<root>', the 'keystone' part
+ # of the module path is always added to the front
+ module_name = ('keystone.%s.sql' %
+ root.replace(os.sep, '.').lstrip('.'))
+ __import__(module_name)
+
+
+class Database(fixtures.Fixture):
+ """A fixture for setting up and tearing down a database.
+
+ """
+
+ def __init__(self, extensions=None):
+ super(Database, self).__init__()
+ self._extensions = extensions
+ initialize_sql_session()
+ _load_sqlalchemy_models()
+
+ def setUp(self):
+ super(Database, self).setUp()
+ _setup_database(extensions=self._extensions)
+
+ self.engine = sql.get_engine()
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.addCleanup(sql.cleanup)
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
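+
+# Illustrative usage (mirrors TestDatabaseDomainConfigs.setUp earlier in this
+# patch): a test case calls self.useFixture(database.Database()) to get a
+# fresh schema, with cleanup registered automatically.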
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
new file mode 100644
index 00000000..47ef6b4b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
@@ -0,0 +1,489 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# NOTE(morganfainberg) This file shouldn't have flake8 run on it as it has
+# code examples that will fail normal CI pep8/flake8 tests. This is expected.
+# The code has been moved here to ensure that proper tests occur on the
+# test_hacking_checks test cases.
+# flake8: noqa
+
+import fixtures
+
+
+class HackingCode(fixtures.Fixture):
+ """A fixture to house the various code examples for the keystone hacking
+ style checks.
+ """
+
+ mutable_default_args = {
+ 'code': """
+ def f():
+ pass
+
+ def f(a, b='', c=None):
+ pass
+
+ def f(bad=[]):
+ pass
+
+ def f(foo, bad=[], more_bad=[x for x in range(3)]):
+ pass
+
+ def f(foo, bad={}):
+ pass
+
+ def f(foo, bad={}, another_bad=[], fine=None):
+ pass
+
+ def f(bad=[]): # noqa
+ pass
+
+ def funcs(bad=dict(), more_bad=list(), even_more_bad=set()):
+ "creating mutables through builtins"
+
+ def funcs(bad=something(), more_bad=some_object.something()):
+ "defaults from any functions"
+
+ def f(bad=set(), more_bad={x for x in range(3)},
+ even_more_bad={1, 2, 3}):
+ "set and set comprehession"
+
+ def f(bad={x: x for x in range(3)}):
+ "dict comprehension"
+ """,
+ 'expected_errors': [
+ (7, 10, 'K001'),
+ (10, 15, 'K001'),
+ (10, 29, 'K001'),
+ (13, 15, 'K001'),
+ (16, 15, 'K001'),
+ (16, 31, 'K001'),
+ (22, 14, 'K001'),
+ (22, 31, 'K001'),
+ (22, 53, 'K001'),
+ (25, 14, 'K001'),
+ (25, 36, 'K001'),
+ (28, 10, 'K001'),
+ (28, 27, 'K001'),
+ (29, 21, 'K001'),
+ (32, 11, 'K001'),
+ ]}
+
+ comments_begin_with_space = {
+ 'code': """
+ # This is a good comment
+
+ #This is a bad one
+
+ # This is alright and can
+ # be continued with extra indentation
+ # if that's what the developer wants.
+ """,
+ 'expected_errors': [
+ (3, 0, 'K002'),
+ ]}
+
+ asserting_none_equality = {
+ 'code': """
+ class Test(object):
+
+ def test(self):
+ self.assertEqual('', '')
+ self.assertEqual('', None)
+ self.assertEqual(None, '')
+ self.assertNotEqual('', None)
+ self.assertNotEqual(None, '')
+ self.assertNotEqual('', None) # noqa
+ self.assertNotEqual(None, '') # noqa
+ """,
+ 'expected_errors': [
+ (5, 8, 'K003'),
+ (6, 8, 'K003'),
+ (7, 8, 'K004'),
+ (8, 8, 'K004'),
+ ]}
+
+ assert_no_translations_for_debug_logging = {
+ 'code': """
+ import logging
+ import logging as stlib_logging
+ from keystone.i18n import _
+ from keystone.i18n import _ as oslo_i18n
+ from keystone.openstack.common import log
+ from keystone.openstack.common import log as oslo_logging
+
+ # stdlib logging
+ L0 = logging.getLogger()
+ L0.debug(_('text'))
+ class C:
+ def __init__(self):
+ L0.debug(oslo_i18n('text', {}))
+
+ # stdlib logging w/ alias and specifying a logger
+ class C:
+ def __init__(self):
+ self.L1 = logging.getLogger(__name__)
+ def m(self):
+ self.L1.debug(
+ _('text'), {}
+ )
+
+ # oslo logging and specifying a logger
+ L2 = log.getLogger(__name__)
+ L2.debug(oslo_i18n('text'))
+
+ # oslo logging w/ alias
+ class C:
+ def __init__(self):
+ self.L3 = oslo_logging.getLogger()
+ self.L3.debug(_('text'))
+
+ # translation on a separate line
+ msg = _('text')
+ L2.debug(msg)
+
+ # this should not fail
+ if True:
+ msg = _('message %s') % X
+ L2.error(msg)
+ raise TypeError(msg)
+ if True:
+ msg = 'message'
+ L2.debug(msg)
+
+ # this should not fail
+ if True:
+ if True:
+ msg = _('message')
+ else:
+ msg = _('message')
+ L2.debug(msg)
+ raise Exception(msg)
+ """,
+ 'expected_errors': [
+ (10, 9, 'K005'),
+ (13, 17, 'K005'),
+ (21, 12, 'K005'),
+ (26, 9, 'K005'),
+ (32, 22, 'K005'),
+ (36, 9, 'K005'),
+ ]
+ }
+
+ oslo_namespace_imports = {
+ 'code': """
+ import oslo.utils
+ import oslo_utils
+ import oslo.utils.encodeutils
+ import oslo_utils.encodeutils
+ from oslo import utils
+ from oslo.utils import encodeutils
+ from oslo_utils import encodeutils
+
+ import oslo.serialization
+ import oslo_serialization
+ import oslo.serialization.jsonutils
+ import oslo_serialization.jsonutils
+ from oslo import serialization
+ from oslo.serialization import jsonutils
+ from oslo_serialization import jsonutils
+
+ import oslo.messaging
+ import oslo_messaging
+ import oslo.messaging.conffixture
+ import oslo_messaging.conffixture
+ from oslo import messaging
+ from oslo.messaging import conffixture
+ from oslo_messaging import conffixture
+
+ import oslo.db
+ import oslo_db
+ import oslo.db.api
+ import oslo_db.api
+ from oslo import db
+ from oslo.db import api
+ from oslo_db import api
+
+ import oslo.config
+ import oslo_config
+ import oslo.config.cfg
+ import oslo_config.cfg
+ from oslo import config
+ from oslo.config import cfg
+ from oslo_config import cfg
+
+ import oslo.i18n
+ import oslo_i18n
+ import oslo.i18n.log
+ import oslo_i18n.log
+ from oslo import i18n
+ from oslo.i18n import log
+ from oslo_i18n import log
+ """,
+ 'expected_errors': [
+ (1, 0, 'K333'),
+ (3, 0, 'K333'),
+ (5, 0, 'K333'),
+ (6, 0, 'K333'),
+ (9, 0, 'K333'),
+ (11, 0, 'K333'),
+ (13, 0, 'K333'),
+ (14, 0, 'K333'),
+ (17, 0, 'K333'),
+ (19, 0, 'K333'),
+ (21, 0, 'K333'),
+ (22, 0, 'K333'),
+ (25, 0, 'K333'),
+ (27, 0, 'K333'),
+ (29, 0, 'K333'),
+ (30, 0, 'K333'),
+ (33, 0, 'K333'),
+ (35, 0, 'K333'),
+ (37, 0, 'K333'),
+ (38, 0, 'K333'),
+ (41, 0, 'K333'),
+ (43, 0, 'K333'),
+ (45, 0, 'K333'),
+ (46, 0, 'K333'),
+ ],
+ }
+
+ dict_constructor = {
+ 'code': """
+ lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
+ fool = dict(a='a', b='b')
+ lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1]))
+ attrs = dict([(k, _from_json(v))])
+ dict([[i,i] for i in range(3)])
+ dict(({1:2}))
+ """,
+ 'expected_errors': [
+ (3, 0, 'K008'),
+ (4, 0, 'K008'),
+ (5, 0, 'K008'),
+ ]}
+
+
+class HackingLogging(fixtures.Fixture):
+
+ shared_imports = """
+ import logging
+ import logging as stlib_logging
+ from keystone.i18n import _
+ from keystone.i18n import _ as oslo_i18n
+ from keystone.i18n import _LC
+ from keystone.i18n import _LE
+ from keystone.i18n import _LE as error_hint
+ from keystone.i18n import _LI
+ from keystone.i18n import _LW
+ from keystone.openstack.common import log
+ from keystone.openstack.common import log as oslo_logging
+ """
+
+ examples = [
+ {
+ 'code': """
+ # stdlib logging
+ LOG = logging.getLogger()
+ LOG.info(_('text'))
+ class C:
+ def __init__(self):
+ LOG.warn(oslo_i18n('text', {}))
+ LOG.warn(_LW('text', {}))
+ """,
+ 'expected_errors': [
+ (3, 9, 'K006'),
+ (6, 17, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ # stdlib logging w/ alias and specifying a logger
+ class C:
+ def __init__(self):
+ self.L = logging.getLogger(__name__)
+ def m(self):
+ self.L.warning(
+ _('text'), {}
+ )
+ self.L.warning(
+ _LW('text'), {}
+ )
+ """,
+ 'expected_errors': [
+ (7, 12, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ # oslo logging and specifying a logger
+ L = log.getLogger(__name__)
+ L.error(oslo_i18n('text'))
+ L.error(error_hint('text'))
+ """,
+ 'expected_errors': [
+ (3, 8, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ # oslo logging w/ alias
+ class C:
+ def __init__(self):
+ self.LOG = oslo_logging.getLogger()
+ self.LOG.critical(_('text'))
+ self.LOG.critical(_LC('text'))
+ """,
+ 'expected_errors': [
+ (5, 26, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ # translation on a separate line
+ msg = _('text')
+ LOG.exception(msg)
+ msg = _LE('text')
+ LOG.exception(msg)
+ """,
+ 'expected_errors': [
+ (4, 14, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ LOG = logging.getLogger()
+
+ # ensure the correct helper is being used
+ LOG.warn(_LI('this should cause an error'))
+
+ # debug should not allow any helpers either
+ LOG.debug(_LI('this should cause an error'))
+ """,
+ 'expected_errors': [
+ (4, 9, 'K006'),
+ (7, 10, 'K005'),
+ ],
+ },
+ {
+ 'code': """
+ # this should not be an error
+ L = log.getLogger(__name__)
+ msg = _('text')
+ L.warn(msg)
+ raise Exception(msg)
+ """,
+ 'expected_errors': [],
+ },
+ {
+ 'code': """
+ L = log.getLogger(__name__)
+ def f():
+ msg = _('text')
+ L2.warn(msg)
+ something = True # add an extra statement here
+ raise Exception(msg)
+ """,
+ 'expected_errors': [],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ def func():
+ msg = _('text')
+ LOG.warn(msg)
+ raise Exception('some other message')
+ """,
+ 'expected_errors': [
+ (4, 13, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ if True:
+ msg = _('text')
+ else:
+ msg = _('text')
+ LOG.warn(msg)
+ raise Exception(msg)
+ """,
+ 'expected_errors': [
+ ],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ if True:
+ msg = _('text')
+ else:
+ msg = _('text')
+ LOG.warn(msg)
+ """,
+ 'expected_errors': [
+ (6, 9, 'K006'),
+ ],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ msg = _LW('text')
+ LOG.warn(msg)
+ raise Exception(msg)
+ """,
+ 'expected_errors': [
+ (3, 9, 'K007'),
+ ],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ msg = _LW('text')
+ LOG.warn(msg)
+ msg = _('something else')
+ raise Exception(msg)
+ """,
+ 'expected_errors': [],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ msg = _LW('hello %s') % 'world'
+ LOG.warn(msg)
+ raise Exception(msg)
+ """,
+ 'expected_errors': [
+ (3, 9, 'K007'),
+ ],
+ },
+ {
+ 'code': """
+ LOG = log.getLogger(__name__)
+ msg = _LW('hello %s') % 'world'
+ LOG.warn(msg)
+ """,
+ 'expected_errors': [],
+ },
+ {
+ 'code': """
+ # this should not be an error
+ LOG = log.getLogger(__name__)
+ try:
+ something = True
+ except AssertionError as e:
+ LOG.warning(six.text_type(e))
+ raise exception.Unauthorized(e)
+ """,
+ 'expected_errors': [],
+ },
+ ]
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py b/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
new file mode 100644
index 00000000..d1ac2ab4
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
@@ -0,0 +1,34 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import shutil
+import tempfile
+
+import fixtures
+
+from keystone.token.providers.fernet import utils
+
+
+class KeyRepository(fixtures.Fixture):
+ def __init__(self, config_fixture):
+ super(KeyRepository, self).__init__()
+ self.config_fixture = config_fixture
+
+ def setUp(self):
+ super(KeyRepository, self).setUp()
+ directory = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, directory)
+ self.config_fixture.config(group='fernet_tokens',
+ key_repository=directory)
+
+ utils.create_key_directory()
+ utils.initialize_key_repository()
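A short usage sketch for the fixture above, assuming a test class that already installs an oslo.config fixture as self.config_fixture (which is what the constructor expects); the base class name here is hypothetical:

    from keystone.tests.unit.ksfixtures import key_repository


    class FernetTokenTestCase(SomeKeystoneTestCase):  # hypothetical base class

        def setUp(self):
            super(FernetTokenTestCase, self).setUp()
            # Points [fernet_tokens]/key_repository at a temp dir and seeds it.
            self.useFixture(key_repository.KeyRepository(self.config_fixture))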
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py b/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py
new file mode 100644
index 00000000..a4be06f8
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py
@@ -0,0 +1,29 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import tempfile
+
+import fixtures
+
+
+class SecureTempFile(fixtures.Fixture):
+ """A fixture for creating a secure temp file."""
+
+ def setUp(self):
+ super(SecureTempFile, self).setUp()
+
+ _fd, self.file_name = tempfile.mkstemp()
+ # Make sure no file descriptors are leaked, close the unused FD.
+ os.close(_fd)
+ self.addCleanup(os.remove, self.file_name)
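A short usage sketch; the test_case parameter stands in for any fixtures-aware test case exposing useFixture:

    from keystone.tests.unit.ksfixtures import temporaryfile


    def write_scratch_file(test_case):
        # The fixture hands back a path that is removed again on test cleanup.
        tmpfile = test_case.useFixture(temporaryfile.SecureTempFile())
        with open(tmpfile.file_name, 'w') as f:
            f.write('scratch data')
        return tmpfile.file_name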
diff --git a/keystone-moon/keystone/tests/unit/mapping_fixtures.py b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
new file mode 100644
index 00000000..0892ada5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
@@ -0,0 +1,1023 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Fixtures for Federation Mapping."""
+
+EMPLOYEE_GROUP_ID = "0cd5e9"
+CONTRACTOR_GROUP_ID = "85a868"
+TESTER_GROUP_ID = "123"
+TESTER_GROUP_NAME = "tester"
+DEVELOPER_GROUP_ID = "xyz"
+DEVELOPER_GROUP_NAME = "Developer"
+CONTRACTOR_GROUP_NAME = "Contractor"
+DEVELOPER_GROUP_DOMAIN_NAME = "outsourcing"
+DEVELOPER_GROUP_DOMAIN_ID = "5abc43"
+FEDERATED_DOMAIN = "Federated"
+LOCAL_DOMAIN = "Local"
+
+# Mapping summary:
+# LastName Bo & not Contractor or SubContractor -> group 0cd5e9
+# FirstName Jill & Contractor or SubContractor -> group 85a868
+MAPPING_SMALL = {
+ "rules": [
+ {
+ "local": [
+ {
+ "group": {
+ "id": EMPLOYEE_GROUP_ID
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "not_any_of": [
+ "Contractor",
+ "SubContractor"
+ ]
+ },
+ {
+ "type": "LastName",
+ "any_one_of": [
+ "Bo"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "group": {
+ "id": CONTRACTOR_GROUP_ID
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "Contractor",
+ "SubContractor"
+ ]
+ },
+ {
+ "type": "FirstName",
+ "any_one_of": [
+ "Jill"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+# Mapping summary:
+# orgPersonType Admin or Big Cheese -> name {0} {1} email {2} and group 0cd5e9
+# orgPersonType Customer -> user name {0} email {1}
+# orgPersonType Tester and email .*@example.com$ -> groups 123 and xyz
+MAPPING_LARGE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0} {1}",
+ "email": "{2}"
+ },
+ "group": {
+ "id": EMPLOYEE_GROUP_ID
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "FirstName"
+ },
+ {
+ "type": "LastName"
+ },
+ {
+ "type": "Email"
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "Admin",
+ "Big Cheese"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "email": "{1}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "Email"
+ },
+ {
+ "type": "orgPersonType",
+ "not_any_of": [
+ "Admin",
+ "Employee",
+ "Contractor",
+ "Tester"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "group": {
+ "id": TESTER_GROUP_ID
+ }
+ },
+ {
+ "group": {
+ "id": DEVELOPER_GROUP_ID
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "Tester"
+ ]
+ },
+ {
+ "type": "Email",
+ "any_one_of": [
+ ".*@example.com$"
+ ],
+ "regex": True
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_BAD_REQ = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": "name"
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName",
+ "bad_requirement": [
+ "Young"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_BAD_VALUE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": "name"
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName",
+ "any_one_of": "should_be_list"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_NO_RULES = {
+ 'rules': []
+}
+
+MAPPING_NO_REMOTE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": "name"
+ }
+ ],
+ "remote": []
+ }
+ ]
+}
+
+MAPPING_MISSING_LOCAL = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "UserName",
+ "any_one_of": "should_be_list"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_WRONG_TYPE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": "{1}"
+ }
+ ],
+ "remote": [
+ {
+ "not_type": "UserName"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_MISSING_TYPE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": "{1}"
+ }
+ ],
+ "remote": [
+ {}
+ ]
+ }
+ ]
+}
+
+MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF = {
+ "rules": [
+ {
+ "local": [
+ {
+ "group": {
+ "id": "0cd5e9"
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "not_any_of": [
+ "SubContractor"
+ ],
+ "invalid_type": "xyz"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF = {
+ "rules": [
+ {
+ "local": [
+ {
+ "group": {
+ "id": "0cd5e9"
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "SubContractor"
+ ],
+ "invalid_type": "xyz"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "group": {
+ "id": "0cd5e9"
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "invalid_type": "xyz"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_EXTRA_RULES_PROPS = {
+ "rules": [
+ {
+ "local": [
+ {
+ "group": {
+ "id": "0cd5e9"
+ }
+ },
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "invalid_type": {
+ "id": "xyz",
+ },
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "not_any_of": [
+ "SubContractor"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_TESTER_REGEX = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "group": {
+ "id": TESTER_GROUP_ID
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+                        ".*Tester.*"
+ ],
+ "regex": True
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_DEVELOPER_REGEX = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ },
+ "group": {
+ "id": DEVELOPER_GROUP_ID
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "Developer"
+ ],
+ },
+ {
+ "type": "Email",
+ "not_any_of": [
+ ".*@example.org$"
+ ],
+ "regex": True
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUP_NAMES = {
+    "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "group": {
+ "name": DEVELOPER_GROUP_NAME,
+ "domain": {
+ "name": DEVELOPER_GROUP_DOMAIN_NAME
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "Employee"
+ ],
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "group": {
+ "name": TESTER_GROUP_NAME,
+ "domain": {
+ "id": DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "BuildingX"
+ ]
+ }
+ ]
+ },
+ ]
+}
+
+MAPPING_EPHEMERAL_USER = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "domain": {
+ "id": FEDERATED_DOMAIN
+ },
+ "type": "ephemeral"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "tbo"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_WHITELIST = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "whitelist": [
+ "Developer", "Contractor"
+ ]
+ },
+ {
+ "type": "UserName"
+ }
+ ],
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {
+ "id": DEVELOPER_GROUP_DOMAIN_ID
+ }
+ },
+ {
+ "user": {
+ "name": "{1}"
+ }
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "domain": {
+ "id": LOCAL_DOMAIN
+ },
+ "type": "ephemeral"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "jsmith"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "whitelist": [
+ "Developer", "Contractor"
+ ]
+ },
+ ],
+ "local": [
+ {
+ "groups": "{0}",
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_LOCAL_USER_LOCAL_DOMAIN = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "domain": {
+ "id": LOCAL_DOMAIN
+ },
+ "type": "local"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "jsmith"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_BLACKLIST_MULTIPLES = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "blacklist": [
+ "Developer", "Manager"
+ ]
+ },
+ {
+ "type": "Thing" # this could be variable length!
+ },
+ {
+ "type": "UserName"
+ },
+ ],
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {
+ "id": DEVELOPER_GROUP_DOMAIN_ID
+ }
+ },
+ {
+ "user": {
+ "name": "{2}",
+ }
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_BLACKLIST = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "blacklist": [
+ "Developer", "Manager"
+ ]
+ },
+ {
+ "type": "UserName"
+ }
+ ],
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {
+ "id": DEVELOPER_GROUP_DOMAIN_ID
+ }
+ },
+ {
+ "user": {
+ "name": "{1}"
+ }
+ }
+ ]
+ }
+ ]
+}
+
+# Exercise all possibilities of user identification. Values are hardcoded on
+# purpose.
+MAPPING_USER_IDS = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "jsmith"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "domain": {
+ "id": "federated"
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "tbo"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "id": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "bob"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "id": "abc123",
+ "name": "{0}",
+ "domain": {
+ "id": "federated"
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "bwilliams"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "blacklist": [
+ "Developer", "Manager"
+ ]
+ },
+ ],
+ "local": [
+ {
+ "groups": "{0}",
+ },
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_WHITELIST_AND_BLACKLIST = {
+ "rules": [
+ {
+ "remote": [
+ {
+ "type": "orgPersonType",
+ "blacklist": [
+ "Employee"
+ ],
+ "whitelist": [
+ "Contractor"
+ ]
+ },
+ ],
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {
+ "id": DEVELOPER_GROUP_DOMAIN_ID
+ }
+ },
+ ]
+ }
+ ]
+}
+
+EMPLOYEE_ASSERTION = {
+ 'Email': 'tim@example.com',
+ 'UserName': 'tbo',
+ 'FirstName': 'Tim',
+ 'LastName': 'Bo',
+ 'orgPersonType': 'Employee;BuildingX'
+}
+
+EMPLOYEE_ASSERTION_MULTIPLE_GROUPS = {
+ 'Email': 'tim@example.com',
+ 'UserName': 'tbo',
+ 'FirstName': 'Tim',
+ 'LastName': 'Bo',
+ 'orgPersonType': 'Developer;Manager;Contractor',
+ 'Thing': 'yes!;maybe!;no!!'
+}
+
+EMPLOYEE_ASSERTION_PREFIXED = {
+ 'PREFIX_Email': 'tim@example.com',
+ 'PREFIX_UserName': 'tbo',
+ 'PREFIX_FirstName': 'Tim',
+ 'PREFIX_LastName': 'Bo',
+ 'PREFIX_orgPersonType': 'SuperEmployee;BuildingX'
+}
+
+CONTRACTOR_ASSERTION = {
+ 'Email': 'jill@example.com',
+ 'UserName': 'jsmith',
+ 'FirstName': 'Jill',
+ 'LastName': 'Smith',
+ 'orgPersonType': 'Contractor;Non-Dev'
+}
+
+ADMIN_ASSERTION = {
+ 'Email': 'bob@example.com',
+ 'UserName': 'bob',
+ 'FirstName': 'Bob',
+ 'LastName': 'Thompson',
+ 'orgPersonType': 'Admin;Chief'
+}
+
+CUSTOMER_ASSERTION = {
+ 'Email': 'beth@example.com',
+ 'UserName': 'bwilliams',
+ 'FirstName': 'Beth',
+ 'LastName': 'Williams',
+ 'orgPersonType': 'Customer'
+}
+
+ANOTHER_CUSTOMER_ASSERTION = {
+ 'Email': 'mark@example.com',
+ 'UserName': 'markcol',
+ 'FirstName': 'Mark',
+ 'LastName': 'Collins',
+ 'orgPersonType': 'Managers;CEO;CTO'
+}
+
+TESTER_ASSERTION = {
+ 'Email': 'testacct@example.com',
+ 'UserName': 'testacct',
+ 'FirstName': 'Test',
+ 'LastName': 'Account',
+ 'orgPersonType': 'MadeupGroup;Tester;GroupX'
+}
+
+ANOTHER_TESTER_ASSERTION = {
+ 'UserName': 'IamTester'
+}
+
+BAD_TESTER_ASSERTION = {
+ 'Email': 'eviltester@example.org',
+ 'UserName': 'Evil',
+ 'FirstName': 'Test',
+ 'LastName': 'Account',
+ 'orgPersonType': 'Tester'
+}
+
+BAD_DEVELOPER_ASSERTION = {
+ 'Email': 'evildeveloper@example.org',
+ 'UserName': 'Evil',
+ 'FirstName': 'Develop',
+ 'LastName': 'Account',
+ 'orgPersonType': 'Developer'
+}
+
+MALFORMED_TESTER_ASSERTION = {
+ 'Email': 'testacct@example.com',
+ 'UserName': 'testacct',
+ 'FirstName': 'Test',
+ 'LastName': 'Account',
+ 'orgPersonType': 'Tester',
+ 'object': object(),
+ 'dictionary': dict(zip('teststring', xrange(10))),
+ 'tuple': tuple(xrange(5))
+}
+
+DEVELOPER_ASSERTION = {
+ 'Email': 'developacct@example.com',
+ 'UserName': 'developacct',
+ 'FirstName': 'Develop',
+ 'LastName': 'Account',
+ 'orgPersonType': 'Developer'
+}
+
+CONTRACTOR_MALFORMED_ASSERTION = {
+ 'UserName': 'user',
+ 'FirstName': object(),
+ 'orgPersonType': 'Contractor'
+}
+
+LOCAL_USER_ASSERTION = {
+ 'UserName': 'marek',
+ 'UserType': 'random'
+}
+
+ANOTHER_LOCAL_USER_ASSERTION = {
+ 'UserName': 'marek',
+ 'Position': 'DirectorGeneral'
+}
+
+UNMATCHED_GROUP_ASSERTION = {
+ 'REMOTE_USER': 'Any Momoose',
+ 'REMOTE_USER_GROUPS': 'EXISTS;NO_EXISTS'
+}
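A minimal sketch of how these fixtures are typically exercised against the federation mapping engine, assuming the Kilo-era RuleProcessor in keystone.contrib.federation.utils (the module path has moved in later releases). Note that multi-valued assertion attributes such as 'Employee;BuildingX' are split on ';' by the engine before matching:

    from keystone.contrib.federation import utils as mapping_utils
    from keystone.tests.unit import mapping_fixtures


    def map_employee():
        processor = mapping_utils.RuleProcessor(
            mapping_fixtures.MAPPING_SMALL['rules'])
        mapped = processor.process(mapping_fixtures.EMPLOYEE_ASSERTION)
        # EMPLOYEE_ASSERTION carries LastName 'Bo' and no (Sub)Contractor
        # type, so the first rule fires: user 'tbo' plus EMPLOYEE_GROUP_ID.
        return mapped['user'], mapped['group_ids']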
diff --git a/keystone-moon/keystone/tests/unit/rest.py b/keystone-moon/keystone/tests/unit/rest.py
new file mode 100644
index 00000000..16513024
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/rest.py
@@ -0,0 +1,245 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_serialization import jsonutils
+import six
+import webtest
+
+from keystone.auth import controllers as auth_controllers
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+
+
+class RestfulTestCase(tests.TestCase):
+ """Performs restful tests against the WSGI app over HTTP.
+
+ This class launches public & admin WSGI servers for every test, which can
+ be accessed by calling ``public_request()`` or ``admin_request()``,
+    respectively.
+
+ ``restful_request()`` and ``request()`` methods are also exposed if you
+ need to bypass restful conventions or access HTTP details in your test
+ implementation.
+
+ Three new asserts are provided:
+
+ * ``assertResponseSuccessful``: called automatically for every request
+ unless an ``expected_status`` is provided
+ * ``assertResponseStatus``: called instead of ``assertResponseSuccessful``,
+ if an ``expected_status`` is provided
+ * ``assertValidResponseHeaders``: validates that the response headers
+ appear as expected
+
+ Requests are automatically serialized according to the defined
+    ``content_type``. Responses are automatically deserialized as well; the
+    deserialized body is available in the ``response.result`` attribute, while
+    the original body content remains in the ``response.body`` attribute.
+
+ """
+
+ # default content type to test
+ content_type = 'json'
+
+ def get_extensions(self):
+ return None
+
+ def setUp(self, app_conf='keystone'):
+ super(RestfulTestCase, self).setUp()
+
+ # Will need to reset the plug-ins
+ self.addCleanup(setattr, auth_controllers, 'AUTH_METHODS', {})
+
+ self.useFixture(database.Database(extensions=self.get_extensions()))
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.public_app = webtest.TestApp(
+ self.loadapp(app_conf, name='main'))
+ self.addCleanup(delattr, self, 'public_app')
+ self.admin_app = webtest.TestApp(
+ self.loadapp(app_conf, name='admin'))
+ self.addCleanup(delattr, self, 'admin_app')
+
+ def request(self, app, path, body=None, headers=None, token=None,
+ expected_status=None, **kwargs):
+ if headers:
+ headers = {str(k): str(v) for k, v in six.iteritems(headers)}
+ else:
+ headers = {}
+
+ if token:
+ headers['X-Auth-Token'] = str(token)
+
+ # sets environ['REMOTE_ADDR']
+ kwargs.setdefault('remote_addr', 'localhost')
+
+ response = app.request(path, headers=headers,
+ status=expected_status, body=body,
+ **kwargs)
+
+ return response
+
+ def assertResponseSuccessful(self, response):
+ """Asserts that a status code lies inside the 2xx range.
+
+        :param response: :py:class:`webtest.TestResponse` to be
+ verified to have a status code between 200 and 299.
+
+ example::
+
+ self.assertResponseSuccessful(response)
+ """
+ self.assertTrue(
+ response.status_code >= 200 and response.status_code <= 299,
+ 'Status code %d is outside of the expected range (2xx)\n\n%s' %
+            (response.status_code, response.body))
+
+ def assertResponseStatus(self, response, expected_status):
+ """Asserts a specific status code on the response.
+
+        :param response: :py:class:`webtest.TestResponse`
+ :param expected_status: The specific ``status`` result expected
+
+ example::
+
+ self.assertResponseStatus(response, 204)
+ """
+ self.assertEqual(
+ response.status_code,
+ expected_status,
+            'Status code %s is not %s, as expected\n\n%s' %
+ (response.status_code, expected_status, response.body))
+
+ def assertValidResponseHeaders(self, response):
+ """Ensures that response headers appear as expected."""
+ self.assertIn('X-Auth-Token', response.headers.get('Vary'))
+
+ def assertValidErrorResponse(self, response, expected_status=400):
+ """Verify that the error response is valid.
+
+ Subclasses can override this function based on the expected response.
+
+ """
+ self.assertEqual(response.status_code, expected_status)
+ error = response.result['error']
+ self.assertEqual(error['code'], response.status_code)
+ self.assertIsNotNone(error.get('title'))
+
+ def _to_content_type(self, body, headers, content_type=None):
+ """Attempt to encode JSON and XML automatically."""
+ content_type = content_type or self.content_type
+
+ if content_type == 'json':
+ headers['Accept'] = 'application/json'
+ if body:
+ headers['Content-Type'] = 'application/json'
+ return jsonutils.dumps(body)
+
+ def _from_content_type(self, response, content_type=None):
+ """Attempt to decode JSON and XML automatically, if detected."""
+ content_type = content_type or self.content_type
+
+ if response.body is not None and response.body.strip():
+ # if a body is provided, a Content-Type is also expected
+ header = response.headers.get('Content-Type')
+ self.assertIn(content_type, header)
+
+ if content_type == 'json':
+ response.result = jsonutils.loads(response.body)
+ else:
+ response.result = response.body
+
+ def restful_request(self, method='GET', headers=None, body=None,
+ content_type=None, response_content_type=None,
+ **kwargs):
+ """Serializes/deserializes json as request/response body.
+
+ .. WARNING::
+
+ * Existing Accept header will be overwritten.
+ * Existing Content-Type header will be overwritten.
+
+ """
+ # Initialize headers dictionary
+ headers = {} if not headers else headers
+
+ body = self._to_content_type(body, headers, content_type)
+
+ # Perform the HTTP request/response
+ response = self.request(method=method, headers=headers, body=body,
+ **kwargs)
+
+ response_content_type = response_content_type or content_type
+ self._from_content_type(response, content_type=response_content_type)
+
+ # we can save some code & improve coverage by always doing this
+ if method != 'HEAD' and response.status_code >= 400:
+ self.assertValidErrorResponse(response)
+
+        # response.result contains the decoded response body
+ return response
+
+ def _request(self, convert=True, **kwargs):
+ if convert:
+ response = self.restful_request(**kwargs)
+ else:
+ response = self.request(**kwargs)
+
+ self.assertValidResponseHeaders(response)
+ return response
+
+ def public_request(self, **kwargs):
+ return self._request(app=self.public_app, **kwargs)
+
+ def admin_request(self, **kwargs):
+ return self._request(app=self.admin_app, **kwargs)
+
+ def _get_token(self, body):
+ """Convenience method so that we can test authenticated requests."""
+ r = self.public_request(method='POST', path='/v2.0/tokens', body=body)
+ return self._get_token_id(r)
+
+ def get_unscoped_token(self):
+ """Convenience method so that we can test authenticated requests."""
+ return self._get_token({
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ },
+ })
+
+ def get_scoped_token(self, tenant_id=None):
+ """Convenience method so that we can test authenticated requests."""
+ if not tenant_id:
+ tenant_id = self.tenant_bar['id']
+ return self._get_token({
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ 'tenantId': tenant_id,
+ },
+ })
+
+ def _get_token_id(self, r):
+ """Helper method to return a token ID from a response.
+
+        This needs to be overridden by child classes based on their
+        content type.
+
+ """
+ raise NotImplementedError()
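A minimal sketch of a concrete subclass, assuming the v2.0 token response layout where the token id sits at access.token.id in the JSON body:

    class ExampleV2RestfulTestCase(RestfulTestCase):

        content_type = 'json'

        def _get_token_id(self, r):
            """Pull the token id out of a v2.0 authentication response."""
            return r.result['access']['token']['id']

With _get_token_id in place, get_unscoped_token() and get_scoped_token() become usable for authenticated request tests.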
diff --git a/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml b/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml
new file mode 100644
index 00000000..db235f7c
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ns0:EntityDescriptor xmlns:ns0="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:ns1="http://www.w3.org/2000/09/xmldsig#" entityID="k2k.com/v3/OS-FEDERATION/idp" validUntil="2014-08-19T21:24:17.411289Z">
+ <ns0:IDPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
+ <ns0:KeyDescriptor use="signing">
+ <ns1:KeyInfo>
+ <ns1:X509Data>
+ <ns1:X509Certificate>MIIDpTCCAo0CAREwDQYJKoZIhvcNAQEFBQAwgZ4xCjAIBgNVBAUTATUxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU3Vubnl2YWxlMRIwEAYDVQQKEwlPcGVuU3RhY2sxETAPBgNVBAsTCEtleXN0b25lMSUwIwYJKoZIhvcNAQkBFhZrZXlzdG9uZUBvcGVuc3RhY2sub3JnMRQwEgYDVQQDEwtTZWxmIFNpZ25lZDAgFw0xMzA3MDkxNjI1MDBaGA8yMDcyMDEwMTE2MjUwMFowgY8xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU3Vubnl2YWxlMRIwEAYDVQQKEwlPcGVuU3RhY2sxETAPBgNVBAsTCEtleXN0b25lMSUwIwYJKoZIhvcNAQkBFhZrZXlzdG9uZUBvcGVuc3RhY2sub3JnMREwDwYDVQQDEwhLZXlzdG9uZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMTC6IdNd9Cg1DshcrT5gRVRF36nEmjSA9QWdik7B925PK70U4F6j4pz/5JL7plIo/8rJ4jJz9ccE7m0iA+IuABtEhEwXkG9rj47Oy0J4ZyDGSh2K1Bl78PA9zxXSzysUTSjBKdAh29dPYbJY7cgZJ0uC3AtfVceYiAOIi14SdFeZ0LZLDXBuLaqUmSMrmKwJ9wAMOCb/jbBP9/3Ycd0GYjlvrSBU4Bqb8/NHasyO4DpPN68OAoyD5r5jUtV8QZN03UjIsoux8e0lrL6+MVtJo0OfWvlSrlzS5HKSryY+uqqQEuxtZKpJM2MV85ujvjc8eDSChh2shhDjBem3FIlHKUCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAed9fHgdJrk+gZcO5gsqq6uURfDOuYD66GsSdZw4BqHjYAcnyWq2da+iw7Uxkqu7iLf2k4+Hu3xjDFrce479OwZkSnbXmqB7XspTGOuM8MgT7jB/ypKTOZ6qaZKSWK1Hta995hMrVVlhUNBLh0MPGqoVWYA4d7mblujgH9vp+4mpCciJagHks8K5FBmI+pobB+uFdSYDoRzX9LTpStspK4e3IoY8baILuGcdKimRNBv6ItG4hMrntAe1/nWMJyUu5rDTGf2V/vAaS0S/faJBwQSz1o38QHMTWHNspfwIdX3yMqI9u7/vYlz3rLy5WdBdUgZrZ3/VLmJTiJVZu5Owq4Q==
+</ns1:X509Certificate>
+ </ns1:X509Data>
+ </ns1:KeyInfo>
+ </ns0:KeyDescriptor>
+ </ns0:IDPSSODescriptor>
+ <ns0:Organization>
+ <ns0:OrganizationName xml:lang="en">openstack</ns0:OrganizationName>
+ <ns0:OrganizationDisplayName xml:lang="en">openstack</ns0:OrganizationDisplayName>
+ <ns0:OrganizationURL xml:lang="en">openstack</ns0:OrganizationURL>
+ </ns0:Organization>
+ <ns0:ContactPerson contactType="technical">
+ <ns0:Company>openstack</ns0:Company>
+ <ns0:GivenName>first</ns0:GivenName>
+ <ns0:SurName>lastname</ns0:SurName>
+ <ns0:EmailAddress>admin@example.com</ns0:EmailAddress>
+ <ns0:TelephoneNumber>555-555-5555</ns0:TelephoneNumber>
+ </ns0:ContactPerson>
+</ns0:EntityDescriptor>
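A short stdlib sketch for reading fixture XML like the document above in a test; the path is illustrative, and element lookups go by namespace URI, so the ns0 prefix in the file does not matter:

    from xml.etree import ElementTree

    MD_NS = {'md': 'urn:oasis:names:tc:SAML:2.0:metadata'}


    def read_idp_metadata(path='idp_saml2_metadata.xml'):  # illustrative path
        root = ElementTree.parse(path).getroot()
        org = root.find('.//md:OrganizationName', MD_NS)
        return root.get('entityID'), org.text  # entity id and org name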
diff --git a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml b/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
new file mode 100644
index 00000000..410f9388
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
@@ -0,0 +1,63 @@
+<ns0:Assertion xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:ns1="http://www.w3.org/2000/09/xmldsig#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ID="9a22528bfe194b2880edce5d60414d6a" IssueInstant="2014-08-19T10:53:57Z" Version="2.0">
+ <ns0:Issuer Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">https://acme.com/FIM/sps/openstack/saml20</ns0:Issuer>
+ <ns1:Signature>
+ <ns1:SignedInfo>
+ <ns1:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#" />
+ <ns1:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
+ <ns1:Reference URI="#9a22528bfe194b2880edce5d60414d6a">
+ <ns1:Transforms>
+ <ns1:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature" />
+ <ns1:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#" />
+ </ns1:Transforms>
+ <ns1:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
+ <ns1:DigestValue>Lem2TKyYt+/tJy2iSos1t0KxcJE=</ns1:DigestValue>
+ </ns1:Reference>
+ </ns1:SignedInfo>
+ <ns1:SignatureValue>b//GXtGeCIJPFsMAHrx4+3yjrL4smSpRLXG9PB3TLMJvU4fx8n2PzK7+VbtWNbZG
+vSgbvbQR52jq77iyaRfQ2iELuFEY+YietLRi7hsitkJCEayPmU+BDlNIGuCXZjAy
+7tmtGFkLlZZJaom1jAzHfZ5JPjZdM5hvQwrhCI2Kzyk=</ns1:SignatureValue>
+ <ns1:KeyInfo>
+ <ns1:X509Data>
+ <ns1:X509Certificate>MIICtjCCAh+gAwIBAgIJAJTeBUN2i9ZNMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNV
+BAYTAkhSMQ8wDQYDVQQIEwZaYWdyZWIxITAfBgNVBAoTGE5la2Egb3JnYW5pemFj
+aWphIGQuby5vLjELMAkGA1UEAxMCQ0EwHhcNMTIxMjI4MTYwODA1WhcNMTQxMjI4
+MTYwODA1WjBvMQswCQYDVQQGEwJIUjEPMA0GA1UECBMGWmFncmViMQ8wDQYDVQQH
+EwZaYWdyZWIxITAfBgNVBAoTGE5la2Egb3JnYW5pemFjaWphIGQuby5vLjEbMBkG
+A1UEAxMSUHJvZ3JhbWVyc2thIGZpcm1hMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB
+iQKBgQCgWApHV5cma0GY/v/vmwgciDQBgITcitx2rG0F+ghXtGiEJeK75VY7jQwE
+UFCbgV+AaOY2NQChK2FKec7Hss/5y+jbWfX2yVwX6TYcCwnOGXenz+cgx2Fwqpu3
+ncL6dYJMfdbKvojBaJQLJTaNjRJsZACButDsDtXDSH9QaRy+hQIDAQABo3sweTAJ
+BgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0
+aWZpY2F0ZTAdBgNVHQ4EFgQUSo9ThP/MOg8QIRWxoPo8qKR8O2wwHwYDVR0jBBgw
+FoAUAelckr4bx8MwZ7y+VlHE46Mbo+cwDQYJKoZIhvcNAQEFBQADgYEAy19Z7Z5/
+/MlWkogu41s0RxL9ffG60QQ0Y8hhDTmgHNx1itj0wT8pB7M4KVMbZ4hjjSFsfRq4
+Vj7jm6LwU0WtZ3HGl8TygTh8AAJvbLROnTjLL5MqI9d9pKvIIfZ2Qs3xmJ7JEv4H
+UHeBXxQq/GmfBv3l+V5ObQ+EHKnyDodLHCk=</ns1:X509Certificate>
+ </ns1:X509Data>
+ </ns1:KeyInfo>
+ </ns1:Signature>
+ <ns0:Subject>
+ <ns0:NameID Format="urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress">test_user</ns0:NameID>
+ <ns0:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
+ <ns0:SubjectConfirmationData NotOnOrAfter="2014-08-19T11:53:57.243106Z" Recipient="http://beta.com/Shibboleth.sso/SAML2/POST" />
+ </ns0:SubjectConfirmation>
+ </ns0:Subject>
+ <ns0:AuthnStatement AuthnInstant="2014-08-19T10:53:57Z" SessionIndex="4e3430a9f8b941e69c133293a7a960a1" SessionNotOnOrAfter="2014-08-19T11:53:57.243106Z">
+ <ns0:AuthnContext>
+ <ns0:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</ns0:AuthnContextClassRef>
+ <ns0:AuthenticatingAuthority>https://acme.com/FIM/sps/openstack/saml20</ns0:AuthenticatingAuthority>
+ </ns0:AuthnContext>
+ </ns0:AuthnStatement>
+ <ns0:AttributeStatement>
+ <ns0:Attribute FriendlyName="keystone_user" Name="user" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:AttributeValue xsi:type="xs:string">test_user</ns0:AttributeValue>
+ </ns0:Attribute>
+ <ns0:Attribute FriendlyName="keystone_roles" Name="roles" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:AttributeValue xsi:type="xs:string">admin</ns0:AttributeValue>
+ <ns0:AttributeValue xsi:type="xs:string">member</ns0:AttributeValue>
+ </ns0:Attribute>
+ <ns0:Attribute FriendlyName="keystone_project" Name="project" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:AttributeValue xsi:type="xs:string">development</ns0:AttributeValue>
+ </ns0:Attribute>
+ </ns0:AttributeStatement>
+</ns0:Assertion>
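And a matching sketch for pulling the mapped attributes back out of the signed assertion fixture; signature handling is out of scope here (keystone produces such signatures by shelling out to the xmlsec1 binary, and verification is not attempted in this sketch):

    from xml.etree import ElementTree

    SAML_NS = {'saml': 'urn:oasis:names:tc:SAML:2.0:assertion'}


    def attribute_values(path='signed_saml2_assertion.xml'):  # illustrative
        root = ElementTree.parse(path).getroot()
        values = {}
        for attr in root.findall('.//saml:Attribute', SAML_NS):
            values[attr.get('Name')] = [
                v.text for v in attr.findall('saml:AttributeValue', SAML_NS)]
        return values  # e.g. {'user': ['test_user'], 'roles': ['admin', 'member'], ...}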
diff --git a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
new file mode 100644
index 00000000..e0159b76
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
@@ -0,0 +1,1129 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from testtools import matchers
+
+# NOTE(morganfainberg): import endpoint filter to populate the SQL model
+from keystone.contrib import endpoint_filter # noqa
+from keystone.tests.unit import test_v3
+
+
+class TestExtensionCase(test_v3.RestfulTestCase):
+
+ EXTENSION_NAME = 'endpoint_filter'
+ EXTENSION_TO_ADD = 'endpoint_filter_extension'
+
+ def config_overrides(self):
+ super(TestExtensionCase, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.contrib.endpoint_filter.backends.catalog_sql.'
+ 'EndpointFilterCatalog')
+
+ def setUp(self):
+ super(TestExtensionCase, self).setUp()
+ self.default_request_url = (
+ '/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': self.endpoint_id})
+
+
+class EndpointFilterCRUDTestCase(TestExtensionCase):
+
+ def test_create_endpoint_project_association(self):
+ """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Valid endpoint and project id test case.
+
+ """
+ self.put(self.default_request_url,
+ body='',
+ expected_status=204)
+
+ def test_create_endpoint_project_association_with_invalid_project(self):
+ """PUT OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Invalid project id test case.
+
+ """
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': uuid.uuid4().hex,
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=404)
+
+ def test_create_endpoint_project_association_with_invalid_endpoint(self):
+ """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Invalid endpoint id test case.
+
+ """
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': uuid.uuid4().hex},
+ body='',
+ expected_status=404)
+
+ def test_create_endpoint_project_association_with_unexpected_body(self):
+ """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Unexpected body in request. The body should be ignored.
+
+ """
+ self.put(self.default_request_url,
+ body={'project_id': self.default_domain_project_id},
+ expected_status=204)
+
+ def test_check_endpoint_project_association(self):
+ """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Valid project and endpoint id test case.
+
+ """
+ self.put(self.default_request_url,
+ body='',
+ expected_status=204)
+ self.head('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': self.endpoint_id},
+ expected_status=204)
+
+ def test_check_endpoint_project_association_with_invalid_project(self):
+ """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Invalid project id test case.
+
+ """
+ self.put(self.default_request_url)
+ self.head('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': uuid.uuid4().hex,
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=404)
+
+ def test_check_endpoint_project_association_with_invalid_endpoint(self):
+ """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Invalid endpoint id test case.
+
+ """
+ self.put(self.default_request_url)
+ self.head('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': uuid.uuid4().hex},
+ body='',
+ expected_status=404)
+
+ def test_list_endpoints_associated_with_valid_project(self):
+ """GET /OS-EP-FILTER/projects/{project_id}/endpoints
+
+ Valid project and endpoint id test case.
+
+ """
+ self.put(self.default_request_url)
+ resource_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
+ 'project_id': self.default_domain_project_id}
+ r = self.get(resource_url)
+ self.assertValidEndpointListResponse(r, self.endpoint,
+ resource_url=resource_url)
+
+ def test_list_endpoints_associated_with_invalid_project(self):
+ """GET /OS-EP-FILTER/projects/{project_id}/endpoints
+
+ Invalid project id test case.
+
+ """
+ self.put(self.default_request_url)
+ self.get('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
+ 'project_id': uuid.uuid4().hex},
+ body='',
+ expected_status=404)
+
+ def test_list_projects_associated_with_endpoint(self):
+ """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
+
+ Valid endpoint-project association test case.
+
+ """
+ self.put(self.default_request_url)
+ resource_url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % {
+ 'endpoint_id': self.endpoint_id}
+ r = self.get(resource_url)
+ self.assertValidProjectListResponse(r, self.default_domain_project,
+ resource_url=resource_url)
+
+ def test_list_projects_with_no_endpoint_project_association(self):
+ """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
+
+ Valid endpoint id but no endpoint-project associations test case.
+
+ """
+ r = self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
+ {'endpoint_id': self.endpoint_id},
+ expected_status=200)
+ self.assertValidProjectListResponse(r, expected_length=0)
+
+ def test_list_projects_associated_with_invalid_endpoint(self):
+ """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
+
+ Invalid endpoint id test case.
+
+ """
+ self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
+ {'endpoint_id': uuid.uuid4().hex},
+ expected_status=404)
+
+ def test_remove_endpoint_project_association(self):
+ """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Valid project id and endpoint id test case.
+
+ """
+ self.put(self.default_request_url)
+ self.delete('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': self.endpoint_id},
+ expected_status=204)
+
+ def test_remove_endpoint_project_association_with_invalid_project(self):
+ """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Invalid project id test case.
+
+ """
+ self.put(self.default_request_url)
+ self.delete('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': uuid.uuid4().hex,
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=404)
+
+ def test_remove_endpoint_project_association_with_invalid_endpoint(self):
+ """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
+
+ Invalid endpoint id test case.
+
+ """
+ self.put(self.default_request_url)
+ self.delete('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': uuid.uuid4().hex},
+ body='',
+ expected_status=404)
+
+ def test_endpoint_project_association_cleanup_when_project_deleted(self):
+ self.put(self.default_request_url)
+ association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
+ {'endpoint_id': self.endpoint_id})
+ r = self.get(association_url, expected_status=200)
+ self.assertValidProjectListResponse(r, expected_length=1)
+
+ self.delete('/projects/%(project_id)s' % {
+ 'project_id': self.default_domain_project_id})
+
+ r = self.get(association_url, expected_status=200)
+ self.assertValidProjectListResponse(r, expected_length=0)
+
+ def test_endpoint_project_association_cleanup_when_endpoint_deleted(self):
+ self.put(self.default_request_url)
+ association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
+ 'project_id': self.default_domain_project_id}
+ r = self.get(association_url, expected_status=200)
+ self.assertValidEndpointListResponse(r, expected_length=1)
+
+ self.delete('/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+
+ r = self.get(association_url, expected_status=200)
+ self.assertValidEndpointListResponse(r, expected_length=0)
+
+
+class EndpointFilterTokenRequestTestCase(TestExtensionCase):
+
+ def test_project_scoped_token_using_endpoint_filter(self):
+ """Verify endpoints from project scoped token filtered."""
+ # create a project to work with
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # grant the user a role on the project
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'user_id': self.user['id'],
+ 'project_id': project['id'],
+ 'role_id': self.role['id']})
+
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ # add one endpoint to the project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': project['id'],
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=204)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=True,
+ endpoint_filter=True,
+ ep_filter_assoc=1)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_default_scoped_token_using_endpoint_filter(self):
+ """Verify endpoints from default scoped token filtered."""
+ # add one endpoint to default project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=204)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=True,
+ endpoint_filter=True,
+ ep_filter_assoc=1)
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+
+ def test_project_scoped_token_with_no_catalog_using_endpoint_filter(self):
+ """Verify endpoint filter when project scoped token returns no catalog.
+
+ Test that the project scoped token response is valid for a given
+ endpoint-project association when no service catalog is returned.
+
+ """
+ # create a project to work with
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # grant the user a role on the project
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'user_id': self.user['id'],
+ 'project_id': project['id'],
+ 'role_id': self.role['id']})
+
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ # add one endpoint to the project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': project['id'],
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=204)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=False,
+ endpoint_filter=True,
+ ep_filter_assoc=1)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_default_scoped_token_with_no_catalog_using_endpoint_filter(self):
+ """Verify endpoint filter when default scoped token returns no catalog.
+
+ Test that the default project scoped token response is valid for a
+ given endpoint-project association when no service catalog is returned.
+
+ """
+ # add one endpoint to default project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=204)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=False,
+ endpoint_filter=True,
+ ep_filter_assoc=1)
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+
+ def test_project_scoped_token_with_no_endpoint_project_association(self):
+ """Verify endpoint filter when no endpoint-project association.
+
+ Test that the project scoped token response is valid when there are
+ no endpoint-project associations defined.
+
+ """
+ # create a project to work with
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # grant the user a role on the project
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'user_id': self.user['id'],
+ 'project_id': project['id'],
+ 'role_id': self.role['id']})
+
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=False,
+ endpoint_filter=True)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_default_scoped_token_with_no_endpoint_project_association(self):
+ """Verify endpoint filter when no endpoint-project association.
+
+ Test that the default project scoped token response is valid when
+ there are no endpoint-project associations defined.
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=False,
+ endpoint_filter=True,)
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+
+ def test_invalid_endpoint_project_association(self):
+ """Verify an invalid endpoint-project association is handled."""
+ # add first endpoint to default project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': self.endpoint_id},
+ body='',
+ expected_status=204)
+
+ # create a second temporary endpoint
+ self.endpoint_id2 = uuid.uuid4().hex
+ self.endpoint2 = self.new_endpoint_ref(service_id=self.service_id)
+ self.endpoint2['id'] = self.endpoint_id2
+ self.catalog_api.create_endpoint(
+ self.endpoint_id2,
+ self.endpoint2.copy())
+
+ # add second endpoint to default project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': self.endpoint_id2},
+ body='',
+ expected_status=204)
+
+ # remove the temporary reference
+ # this will create inconsistency in the endpoint filter table
+ # which is fixed during the catalog creation for token request
+ self.catalog_api.delete_endpoint(self.endpoint_id2)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=True,
+ endpoint_filter=True,
+ ep_filter_assoc=1)
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+
+ def test_disabled_endpoint(self):
+ """Test that a disabled endpoint is handled."""
+ # Add an enabled endpoint to the default project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': self.endpoint_id},
+ expected_status=204)
+
+ # Add a disabled endpoint to the default project.
+
+ # Create a disabled endpoint that's like the enabled one.
+ disabled_endpoint_ref = copy.copy(self.endpoint)
+ disabled_endpoint_id = uuid.uuid4().hex
+ disabled_endpoint_ref.update({
+ 'id': disabled_endpoint_id,
+ 'enabled': False,
+ 'interface': 'internal'
+ })
+ self.catalog_api.create_endpoint(disabled_endpoint_id,
+ disabled_endpoint_ref)
+
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': disabled_endpoint_id},
+ expected_status=204)
+
+ # Authenticate to get token with catalog
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+
+ endpoints = r.result['token']['catalog'][0]['endpoints']
+ endpoint_ids = [ep['id'] for ep in endpoints]
+ self.assertEqual([self.endpoint_id], endpoint_ids)
+
+ def test_multiple_endpoint_project_associations(self):
+
+ def _create_an_endpoint():
+ endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ r = self.post('/endpoints', body={'endpoint': endpoint_ref})
+ return r.result['endpoint']['id']
+
+ # create three endpoints
+ endpoint_id1 = _create_an_endpoint()
+ endpoint_id2 = _create_an_endpoint()
+ _create_an_endpoint()
+
+ # only associate two endpoints with project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': endpoint_id1},
+ expected_status=204)
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': endpoint_id2},
+ expected_status=204)
+
+ # there should be only two endpoints in token catalog
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r,
+ require_catalog=True,
+ endpoint_filter=True,
+ ep_filter_assoc=2)
+
+
+class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
+ JSON_HOME_DATA = {
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/endpoint_projects': {
+ 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects',
+ 'href-vars': {
+ 'endpoint_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/param/'
+ 'endpoint_id',
+ },
+ },
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/endpoint_groups': {
+ 'href': '/OS-EP-FILTER/endpoint_groups',
+ },
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/endpoint_group': {
+ 'href-template': '/OS-EP-FILTER/endpoint_groups/'
+ '{endpoint_group_id}',
+ 'href-vars': {
+ 'endpoint_group_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/'
+ 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
+ },
+ },
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/endpoint_group_to_project_association': {
+ 'href-template': '/OS-EP-FILTER/endpoint_groups/'
+ '{endpoint_group_id}/projects/{project_id}',
+ 'href-vars': {
+ 'project_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/param/'
+ 'project_id',
+ 'endpoint_group_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/'
+ 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
+ },
+ },
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/projects_associated_with_endpoint_group': {
+ 'href-template': '/OS-EP-FILTER/endpoint_groups/'
+ '{endpoint_group_id}/projects',
+ 'href-vars': {
+ 'endpoint_group_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/'
+ 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
+ },
+ },
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/endpoints_in_endpoint_group': {
+ 'href-template': '/OS-EP-FILTER/endpoint_groups/'
+ '{endpoint_group_id}/endpoints',
+ 'href-vars': {
+ 'endpoint_group_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/'
+ 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
+ },
+ },
+ }
+
+
+class EndpointGroupCRUDTestCase(TestExtensionCase):
+
+ DEFAULT_ENDPOINT_GROUP_BODY = {
+ 'endpoint_group': {
+ 'description': 'endpoint group description',
+ 'filters': {
+ 'interface': 'admin'
+ },
+ 'name': 'endpoint_group_name'
+ }
+ }
+
+ DEFAULT_ENDPOINT_GROUP_URL = '/OS-EP-FILTER/endpoint_groups'
+
+ def test_create_endpoint_group(self):
+ """POST /OS-EP-FILTER/endpoint_groups
+
+ Valid endpoint group test case.
+
+ """
+ r = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
+ body=self.DEFAULT_ENDPOINT_GROUP_BODY)
+ expected_filters = (self.DEFAULT_ENDPOINT_GROUP_BODY
+ ['endpoint_group']['filters'])
+ expected_name = (self.DEFAULT_ENDPOINT_GROUP_BODY
+ ['endpoint_group']['name'])
+ self.assertEqual(expected_filters,
+ r.result['endpoint_group']['filters'])
+ self.assertEqual(expected_name, r.result['endpoint_group']['name'])
+ self.assertThat(
+ r.result['endpoint_group']['links']['self'],
+ matchers.EndsWith(
+ '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': r.result['endpoint_group']['id']}))
+
+ def test_create_invalid_endpoint_group(self):
+ """POST /OS-EP-FILTER/endpoint_groups
+
+ Invalid endpoint group creation test case.
+
+ """
+ invalid_body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
+ invalid_body['endpoint_group']['filters'] = {'foobar': 'admin'}
+ self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
+ body=invalid_body,
+ expected_status=400)
+
+ def test_get_endpoint_group(self):
+ """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Valid endpoint group test case.
+
+ """
+ # create an endpoint group to work with
+ response = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
+ body=self.DEFAULT_ENDPOINT_GROUP_BODY)
+ endpoint_group_id = response.result['endpoint_group']['id']
+ endpoint_group_filters = response.result['endpoint_group']['filters']
+ endpoint_group_name = response.result['endpoint_group']['name']
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.get(url)
+ self.assertEqual(endpoint_group_id,
+ response.result['endpoint_group']['id'])
+ self.assertEqual(endpoint_group_filters,
+ response.result['endpoint_group']['filters'])
+ self.assertEqual(endpoint_group_name,
+ response.result['endpoint_group']['name'])
+ self.assertThat(response.result['endpoint_group']['links']['self'],
+ matchers.EndsWith(url))
+
+ def test_get_invalid_endpoint_group(self):
+ """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Invalid endpoint group test case.
+
+ """
+ endpoint_group_id = 'foobar'
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.get(url, expected_status=404)
+
+ def test_check_endpoint_group(self):
+ """HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
+
+ Valid endpoint_group_id test case.
+
+ """
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.head(url, expected_status=200)
+
+ def test_check_invalid_endpoint_group(self):
+ """HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
+
+ Invalid endpoint_group_id test case.
+
+ """
+ endpoint_group_id = 'foobar'
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.head(url, expected_status=404)
+
+ def test_patch_endpoint_group(self):
+ """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Valid endpoint group patch test case.
+
+ """
+ body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
+ body['endpoint_group']['filters'] = {'region_id': 'UK'}
+ body['endpoint_group']['name'] = 'patch_test'
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ r = self.patch(url, body=body)
+ self.assertEqual(endpoint_group_id,
+ r.result['endpoint_group']['id'])
+ self.assertEqual(body['endpoint_group']['filters'],
+ r.result['endpoint_group']['filters'])
+ self.assertThat(r.result['endpoint_group']['links']['self'],
+ matchers.EndsWith(url))
+
+ def test_patch_nonexistent_endpoint_group(self):
+ """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Invalid endpoint group patch test case.
+
+ """
+ body = {
+ 'endpoint_group': {
+ 'name': 'patch_test'
+ }
+ }
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': 'ABC'}
+ self.patch(url, body=body, expected_status=404)
+
+ def test_patch_invalid_endpoint_group(self):
+ """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Valid endpoint group patch test case.
+
+ """
+ body = {
+ 'endpoint_group': {
+ 'description': 'endpoint group description',
+ 'filters': {
+ 'region': 'UK'
+ },
+ 'name': 'patch_test'
+ }
+ }
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.patch(url, body=body, expected_status=400)
+
+ # Perform a GET call to ensure that the content remains
+ # the same (as DEFAULT_ENDPOINT_GROUP_BODY) after attempting to update
+ # with an invalid filter
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ r = self.get(url)
+ del r.result['endpoint_group']['id']
+ del r.result['endpoint_group']['links']
+ self.assertDictEqual(self.DEFAULT_ENDPOINT_GROUP_BODY, r.result)
+
+ def test_delete_endpoint_group(self):
+ """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Valid endpoint group test case.
+
+ """
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.delete(url)
+ self.get(url, expected_status=404)
+
+ def test_delete_invalid_endpoint_group(self):
+ """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
+
+ Invalid endpoint group test case.
+
+ """
+ endpoint_group_id = 'foobar'
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.delete(url, expected_status=404)
+
+ def test_add_endpoint_group_to_project(self):
+ """Create a valid endpoint group and project association."""
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+ self._create_endpoint_group_project_association(endpoint_group_id,
+ self.project_id)
+
+ def test_add_endpoint_group_to_project_with_invalid_project_id(self):
+ """Create an invalid endpoint group and project association."""
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # associate endpoint group with project
+ project_id = uuid.uuid4().hex
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, project_id)
+ self.put(url, expected_status=404)
+
+ def test_get_endpoint_group_in_project(self):
+ """Test retrieving project endpoint group association."""
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # associate endpoint group with project
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.project_id)
+ self.put(url)
+ response = self.get(url)
+ self.assertEqual(
+ endpoint_group_id,
+ response.result['project_endpoint_group']['endpoint_group_id'])
+ self.assertEqual(
+ self.project_id,
+ response.result['project_endpoint_group']['project_id'])
+
+ def test_get_invalid_endpoint_group_in_project(self):
+ """Test retrieving project endpoint group association."""
+ endpoint_group_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, project_id)
+ self.get(url, expected_status=404)
+
+ def test_check_endpoint_group_to_project(self):
+ """Test HEAD with a valid endpoint group and project association."""
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+ self._create_endpoint_group_project_association(endpoint_group_id,
+ self.project_id)
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.project_id)
+ self.head(url, expected_status=200)
+
+ def test_check_endpoint_group_to_project_with_invalid_project_id(self):
+ """Test HEAD with an invalid endpoint group and project association."""
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # create an endpoint group to project association
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.project_id)
+ self.put(url)
+
+ # send a head request with an invalid project id
+ project_id = uuid.uuid4().hex
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, project_id)
+ self.head(url, expected_status=404)
+
+ def test_list_endpoint_groups(self):
+ """GET /OS-EP-FILTER/endpoint_groups."""
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+        # retrieve all endpoint groups
+ url = '/OS-EP-FILTER/endpoint_groups'
+ r = self.get(url)
+ self.assertNotEmpty(r.result['endpoint_groups'])
+ self.assertEqual(endpoint_group_id,
+ r.result['endpoint_groups'][0].get('id'))
+
+ def test_list_projects_associated_with_endpoint_group(self):
+ """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects
+
+ Valid endpoint group test case.
+
+ """
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # associate endpoint group with project
+ self._create_endpoint_group_project_association(endpoint_group_id,
+ self.project_id)
+
+        # retrieve the list of projects associated with the endpoint group
+ url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
+ '/projects' %
+ {'endpoint_group_id': endpoint_group_id})
+ self.get(url)
+
+ def test_list_endpoints_associated_with_endpoint_group(self):
+ """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/endpoints
+
+ Valid endpoint group test case.
+
+ """
+ # create a service
+ service_ref = self.new_service_ref()
+ response = self.post(
+ '/services',
+ body={'service': service_ref})
+
+ service_id = response.result['service']['id']
+
+ # create an endpoint
+ endpoint_ref = self.new_endpoint_ref(service_id=service_id)
+ response = self.post(
+ '/endpoints',
+ body={'endpoint': endpoint_ref})
+ endpoint_id = response.result['endpoint']['id']
+
+ # create an endpoint group
+ body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
+ body['endpoint_group']['filters'] = {'service_id': service_id}
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, body)
+
+ # create association
+ self._create_endpoint_group_project_association(endpoint_group_id,
+ self.project_id)
+
+        # retrieve the list of endpoints associated with the endpoint group
+ url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
+ '/endpoints' % {'endpoint_group_id': endpoint_group_id})
+ r = self.get(url)
+ self.assertNotEmpty(r.result['endpoints'])
+ self.assertEqual(endpoint_id, r.result['endpoints'][0].get('id'))
+
+ def test_list_endpoints_associated_with_project_endpoint_group(self):
+ """GET /OS-EP-FILTER/projects/{project_id}/endpoints
+
+ Valid project, endpoint id, and endpoint group test case.
+
+ """
+ # create a temporary service
+ service_ref = self.new_service_ref()
+ response = self.post('/services', body={'service': service_ref})
+ service_id2 = response.result['service']['id']
+
+ # create additional endpoints
+ self._create_endpoint_and_associations(
+ self.default_domain_project_id, service_id2)
+ self._create_endpoint_and_associations(
+ self.default_domain_project_id)
+
+        # create a project and endpoint association with the default endpoint
+ self.put(self.default_request_url)
+
+ # create an endpoint group that contains a different endpoint
+ body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
+ body['endpoint_group']['filters'] = {'service_id': service_id2}
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, body)
+
+ # associate endpoint group with project
+ self._create_endpoint_group_project_association(
+ endpoint_group_id, self.default_domain_project_id)
+
+ # Now get a list of the filtered endpoints
+ endpoints_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
+ 'project_id': self.default_domain_project_id}
+ r = self.get(endpoints_url)
+ endpoints = self.assertValidEndpointListResponse(r)
+        self.assertEqual(2, len(endpoints))
+
+ # Now remove project endpoint group association
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.default_domain_project_id)
+ self.delete(url)
+
+ # Now remove endpoint group
+ url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
+ 'endpoint_group_id': endpoint_group_id}
+ self.delete(url)
+
+ r = self.get(endpoints_url)
+ endpoints = self.assertValidEndpointListResponse(r)
+        self.assertEqual(1, len(endpoints))
+
+ def test_endpoint_group_project_cleanup_with_project(self):
+ # create endpoint group
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # create new project and associate with endpoint_group
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': project_ref})
+ project = self.assertValidProjectResponse(r, project_ref)
+ url = self._get_project_endpoint_group_url(endpoint_group_id,
+ project['id'])
+ self.put(url)
+
+        # check that we can retrieve the project endpoint group association
+ self.get(url)
+
+ # Now delete the project and then try and retrieve the project
+ # endpoint group association again
+ self.delete('/projects/%(project_id)s' % {
+ 'project_id': project['id']})
+ self.get(url, expected_status=404)
+
+ def test_endpoint_group_project_cleanup_with_endpoint_group(self):
+ # create endpoint group
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # create new project and associate with endpoint_group
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': project_ref})
+ project = self.assertValidProjectResponse(r, project_ref)
+ url = self._get_project_endpoint_group_url(endpoint_group_id,
+ project['id'])
+ self.put(url)
+
+        # check that we can retrieve the project endpoint group association
+ self.get(url)
+
+ # now remove the project endpoint group association
+ self.delete(url)
+ self.get(url, expected_status=404)
+
+ def test_removing_an_endpoint_group_project(self):
+ # create an endpoint group
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # create an endpoint_group project
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.default_domain_project_id)
+ self.put(url)
+
+ # remove the endpoint group project
+ self.delete(url)
+ self.get(url, expected_status=404)
+
+ def _create_valid_endpoint_group(self, url, body):
+ r = self.post(url, body=body)
+ return r.result['endpoint_group']['id']
+
+ def _create_endpoint_group_project_association(self,
+ endpoint_group_id,
+ project_id):
+ url = self._get_project_endpoint_group_url(endpoint_group_id,
+ project_id)
+ self.put(url)
+
+ def _get_project_endpoint_group_url(self,
+ endpoint_group_id,
+ project_id):
+ return ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
+ '/projects/%(project_id)s' %
+ {'endpoint_group_id': endpoint_group_id,
+ 'project_id': project_id})
+
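+    # Illustrative usage of the helper above (a sketch with made-up ids,
+    # not part of the original tests):
+    #   _get_project_endpoint_group_url('eg1', 'p1')
+    #   -> '/OS-EP-FILTER/endpoint_groups/eg1/projects/p1'
+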
+ def _create_endpoint_and_associations(self, project_id, service_id=None):
+ """Creates an endpoint associated with service and project."""
+ if not service_id:
+ # create a new service
+ service_ref = self.new_service_ref()
+ response = self.post(
+ '/services', body={'service': service_ref})
+ service_id = response.result['service']['id']
+
+ # create endpoint
+ endpoint_ref = self.new_endpoint_ref(service_id=service_id)
+ response = self.post('/endpoints', body={'endpoint': endpoint_ref})
+ endpoint = response.result['endpoint']
+
+        # now add the endpoint to the test's default project (note that
+        # self.project['id'] is used here, not the project_id argument)
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': endpoint['id']})
+ return endpoint
diff --git a/keystone-moon/keystone/tests/unit/test_auth.py b/keystone-moon/keystone/tests/unit/test_auth.py
new file mode 100644
index 00000000..295e028d
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_auth.py
@@ -0,0 +1,1328 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import datetime
+import uuid
+
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+from testtools import matchers
+
+from keystone import assignment
+from keystone import auth
+from keystone.common import authorization
+from keystone import config
+from keystone import exception
+from keystone.models import token_model
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+from keystone import token
+from keystone.token import provider
+from keystone import trust
+
+
+CONF = cfg.CONF
+TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+HOST_URL = 'http://keystone:5001'
+
+
+def _build_user_auth(token=None, user_id=None, username=None,
+ password=None, tenant_id=None, tenant_name=None,
+ trust_id=None):
+ """Build auth dictionary.
+
+ It will create an auth dictionary based on all the arguments
+ that it receives.
+ """
+ auth_json = {}
+ if token is not None:
+ auth_json['token'] = token
+ if username or password:
+ auth_json['passwordCredentials'] = {}
+ if username is not None:
+ auth_json['passwordCredentials']['username'] = username
+ if user_id is not None:
+ auth_json['passwordCredentials']['userId'] = user_id
+ if password is not None:
+ auth_json['passwordCredentials']['password'] = password
+ if tenant_name is not None:
+ auth_json['tenantName'] = tenant_name
+ if tenant_id is not None:
+ auth_json['tenantId'] = tenant_id
+ if trust_id is not None:
+ auth_json['trust_id'] = trust_id
+ return auth_json
+
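+# Illustrative only (a hedged sketch, not part of the original tests):
+# for username/password credentials the helper above produces a v2 auth
+# body shaped like
+#
+#   _build_user_auth(username='FOO', password='foo2', tenant_name='BAR')
+#   == {'passwordCredentials': {'username': 'FOO', 'password': 'foo2'},
+#       'tenantName': 'BAR'}
+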
+
+class AuthTest(tests.TestCase):
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(AuthTest, self).setUp()
+
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.context_with_remote_user = {'environment':
+ {'REMOTE_USER': 'FOO',
+ 'AUTH_TYPE': 'Negotiate'}}
+ self.empty_context = {'environment': {}}
+
+ self.controller = token.controllers.Auth()
+
+ def assertEqualTokens(self, a, b, enforce_audit_ids=True):
+ """Assert that two tokens are equal.
+
+        Compare two tokens except for their ids and audit ids; the
+        expiry and issued-at timestamps are only compared approximately.
+ """
+ def normalize(token):
+ token['access']['token']['id'] = 'dummy'
+ del token['access']['token']['expires']
+ del token['access']['token']['issued_at']
+ del token['access']['token']['audit_ids']
+ return token
+
+ self.assertCloseEnoughForGovernmentWork(
+ timeutils.parse_isotime(a['access']['token']['expires']),
+ timeutils.parse_isotime(b['access']['token']['expires']))
+ self.assertCloseEnoughForGovernmentWork(
+ timeutils.parse_isotime(a['access']['token']['issued_at']),
+ timeutils.parse_isotime(b['access']['token']['issued_at']))
+ if enforce_audit_ids:
+ self.assertIn(a['access']['token']['audit_ids'][0],
+ b['access']['token']['audit_ids'])
+ self.assertThat(len(a['access']['token']['audit_ids']),
+ matchers.LessThan(3))
+ self.assertThat(len(b['access']['token']['audit_ids']),
+ matchers.LessThan(3))
+
+ return self.assertDictEqual(normalize(a), normalize(b))
+
+
+class AuthBadRequests(AuthTest):
+ def test_no_external_auth(self):
+ """Verify that _authenticate_external() raises exception if N/A."""
+ self.assertRaises(
+ token.controllers.ExternalAuthNotApplicable,
+ self.controller._authenticate_external,
+ context={}, auth={})
+
+ def test_empty_remote_user(self):
+ """Verify that _authenticate_external() raises exception if
+ REMOTE_USER is set as the empty string.
+ """
+ context = {'environment': {'REMOTE_USER': ''}}
+ self.assertRaises(
+ token.controllers.ExternalAuthNotApplicable,
+ self.controller._authenticate_external,
+ context=context, auth={})
+
+ def test_no_token_in_auth(self):
+ """Verify that _authenticate_token() raises exception if no token."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller._authenticate_token,
+ None, {})
+
+ def test_no_credentials_in_auth(self):
+ """Verify that _authenticate_local() raises exception if no creds."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller._authenticate_local,
+ None, {})
+
+ def test_empty_username_and_userid_in_auth(self):
+ """Verify that empty username and userID raises ValidationError."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller._authenticate_local,
+ None, {'passwordCredentials': {'password': 'abc',
+ 'userId': '', 'username': ''}})
+
+ def test_authenticate_blank_request_body(self):
+ """Verify sending empty json dict raises the right exception."""
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, {})
+
+ def test_authenticate_blank_auth(self):
+ """Verify sending blank 'auth' raises the right exception."""
+ body_dict = _build_user_auth()
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_invalid_auth_content(self):
+ """Verify sending invalid 'auth' raises the right exception."""
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, {'auth': 'abcd'})
+
+ def test_authenticate_user_id_too_large(self):
+ """Verify sending large 'userId' raises the right exception."""
+ body_dict = _build_user_auth(user_id='0' * 65, username='FOO',
+ password='foo2')
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_username_too_large(self):
+ """Verify sending large 'username' raises the right exception."""
+ body_dict = _build_user_auth(username='0' * 65, password='foo2')
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_tenant_id_too_large(self):
+ """Verify sending large 'tenantId' raises the right exception."""
+ body_dict = _build_user_auth(username='FOO', password='foo2',
+ tenant_id='0' * 65)
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_tenant_name_too_large(self):
+ """Verify sending large 'tenantName' raises the right exception."""
+ body_dict = _build_user_auth(username='FOO', password='foo2',
+ tenant_name='0' * 65)
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_token_too_large(self):
+ """Verify sending large 'token' raises the right exception."""
+ body_dict = _build_user_auth(token={'id': '0' * 8193})
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_password_too_large(self):
+ """Verify sending large 'password' raises the right exception."""
+ length = CONF.identity.max_password_length + 1
+ body_dict = _build_user_auth(username='FOO', password='0' * length)
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+
+class AuthWithToken(AuthTest):
+ def test_unscoped_token(self):
+ """Verify getting an unscoped token with password creds."""
+ body_dict = _build_user_auth(username='FOO',
+ password='foo2')
+ unscoped_token = self.controller.authenticate({}, body_dict)
+ self.assertNotIn('tenant', unscoped_token['access']['token'])
+
+ def test_auth_invalid_token(self):
+ """Verify exception is raised if invalid token."""
+ body_dict = _build_user_auth(token={"id": uuid.uuid4().hex})
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_bad_formatted_token(self):
+ """Verify exception is raised if invalid token."""
+ body_dict = _build_user_auth(token={})
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_unscoped_token_no_project(self):
+ """Verify getting an unscoped token with an unscoped token."""
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2')
+ unscoped_token = self.controller.authenticate({}, body_dict)
+
+ body_dict = _build_user_auth(
+ token=unscoped_token["access"]["token"])
+ unscoped_token_2 = self.controller.authenticate({}, body_dict)
+
+ self.assertEqualTokens(unscoped_token, unscoped_token_2)
+
+ def test_auth_unscoped_token_project(self):
+ """Verify getting a token in a tenant with an unscoped token."""
+ # Add a role in so we can check we get this back
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_member['id'])
+ # Get an unscoped tenant
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2')
+ unscoped_token = self.controller.authenticate({}, body_dict)
+ # Get a token on BAR tenant using the unscoped tenant
+ body_dict = _build_user_auth(
+ token=unscoped_token["access"]["token"],
+ tenant_name="BAR")
+ scoped_token = self.controller.authenticate({}, body_dict)
+
+ tenant = scoped_token["access"]["token"]["tenant"]
+ roles = scoped_token["access"]["metadata"]["roles"]
+ self.assertEqual(self.tenant_bar['id'], tenant["id"])
+ self.assertThat(roles, matchers.Contains(self.role_member['id']))
+
+ def test_auth_token_project_group_role(self):
+ """Verify getting a token in a tenant with group roles."""
+ # Add a v2 style role in so we can check we get this back
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_member['id'])
+ # Now create a group role for this user as well
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ new_group = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ self.identity_api.add_user_to_group(self.user_foo['id'],
+ new_group['id'])
+ self.assignment_api.create_grant(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_admin['id'])
+
+ # Get a scoped token for the tenant
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name="BAR")
+
+ scoped_token = self.controller.authenticate({}, body_dict)
+
+ tenant = scoped_token["access"]["token"]["tenant"]
+ roles = scoped_token["access"]["metadata"]["roles"]
+ self.assertEqual(self.tenant_bar['id'], tenant["id"])
+ self.assertIn(self.role_member['id'], roles)
+ self.assertIn(self.role_admin['id'], roles)
+
+ def test_belongs_to_no_tenant(self):
+ r = self.controller.authenticate(
+ {},
+ auth={
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password']
+ }
+ })
+ unscoped_token_id = r['access']['token']['id']
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
+ token_id=unscoped_token_id)
+
+ def test_belongs_to(self):
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name="BAR")
+
+ scoped_token = self.controller.authenticate({}, body_dict)
+ scoped_token_id = scoped_token['access']['token']['id']
+
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={'belongsTo': 'me'}),
+ token_id=scoped_token_id)
+
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
+ token_id=scoped_token_id)
+
+ def test_token_auth_with_binding(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ body_dict = _build_user_auth()
+ unscoped_token = self.controller.authenticate(
+ self.context_with_remote_user, body_dict)
+
+ # the token should have bind information in it
+ bind = unscoped_token['access']['token']['bind']
+ self.assertEqual('FOO', bind['kerberos'])
+
+ body_dict = _build_user_auth(
+ token=unscoped_token['access']['token'],
+ tenant_name='BAR')
+
+ # using unscoped token without remote user context fails
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ self.empty_context, body_dict)
+
+ # using token with remote user context succeeds
+ scoped_token = self.controller.authenticate(
+ self.context_with_remote_user, body_dict)
+
+ # the bind information should be carried over from the original token
+ bind = scoped_token['access']['token']['bind']
+ self.assertEqual('FOO', bind['kerberos'])
+
+ def test_deleting_role_revokes_token(self):
+ role_controller = assignment.controllers.Role()
+ project1 = {'id': 'Project1', 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(project1['id'], project1)
+ role_one = {'id': 'role_one', 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_one['id'], role_one)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], project1['id'], role_one['id'])
+ no_context = {}
+
+ # Get a scoped token for the tenant
+ body_dict = _build_user_auth(
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_name=project1['name'])
+ token = self.controller.authenticate(no_context, body_dict)
+ # Ensure it is valid
+ token_id = token['access']['token']['id']
+ self.controller.validate_token(
+ dict(is_admin=True, query_string={}),
+ token_id=token_id)
+
+ # Delete the role, which should invalidate the token
+ role_controller.delete_role(
+ dict(is_admin=True, query_string={}), role_one['id'])
+
+ # Check the token is now invalid
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={}),
+ token_id=token_id)
+
+ def test_only_original_audit_id_is_kept(self):
+ context = {}
+
+ def get_audit_ids(token):
+ return token['access']['token']['audit_ids']
+
+ # get a token
+ body_dict = _build_user_auth(username='FOO', password='foo2')
+ unscoped_token = self.controller.authenticate(context, body_dict)
+ starting_audit_id = get_audit_ids(unscoped_token)[0]
+ self.assertIsNotNone(starting_audit_id)
+
+ # get another token to ensure the correct parent audit_id is set
+ body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
+ unscoped_token_2 = self.controller.authenticate(context, body_dict)
+ audit_ids = get_audit_ids(unscoped_token_2)
+ self.assertThat(audit_ids, matchers.HasLength(2))
+ self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id))
+
+ # get another token from token 2 and ensure the correct parent
+ # audit_id is set
+ body_dict = _build_user_auth(token=unscoped_token_2["access"]["token"])
+ unscoped_token_3 = self.controller.authenticate(context, body_dict)
+ audit_ids = get_audit_ids(unscoped_token_3)
+ self.assertThat(audit_ids, matchers.HasLength(2))
+ self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id))
+
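+    # Illustrative audit-id chaining (ids invented for this sketch): if
+    # the first token carries audit_ids == ['A'], a token re-scoped from
+    # it carries ['B', 'A'], and a token obtained from that one carries
+    # ['C', 'A'] -- only the original audit id is propagated.
+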
+ def test_revoke_by_audit_chain_id_original_token(self):
+ self.config_fixture.config(group='token', revoke_by_id=False)
+ context = {}
+
+ # get a token
+ body_dict = _build_user_auth(username='FOO', password='foo2')
+ unscoped_token = self.controller.authenticate(context, body_dict)
+ token_id = unscoped_token['access']['token']['id']
+ # get a second token
+ body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
+ unscoped_token_2 = self.controller.authenticate(context, body_dict)
+ token_2_id = unscoped_token_2['access']['token']['id']
+
+ self.token_provider_api.revoke_token(token_id, revoke_chain=True)
+
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_id)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_2_id)
+
+ def test_revoke_by_audit_chain_id_chained_token(self):
+ self.config_fixture.config(group='token', revoke_by_id=False)
+ context = {}
+
+ # get a token
+ body_dict = _build_user_auth(username='FOO', password='foo2')
+ unscoped_token = self.controller.authenticate(context, body_dict)
+ token_id = unscoped_token['access']['token']['id']
+ # get a second token
+ body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
+ unscoped_token_2 = self.controller.authenticate(context, body_dict)
+ token_2_id = unscoped_token_2['access']['token']['id']
+
+ self.token_provider_api.revoke_token(token_2_id, revoke_chain=True)
+
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_id)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_2_id)
+
+ def _mock_audit_info(self, parent_audit_id):
+        # NOTE(morganfainberg): The token model and other cases that are
+ # extracting the audit id expect 'None' if the audit id doesn't
+ # exist. This ensures that the audit_id is None and the
+ # audit_chain_id will also return None.
+ return [None, None]
+
+ def test_revoke_with_no_audit_info(self):
+ self.config_fixture.config(group='token', revoke_by_id=False)
+ context = {}
+
+ with mock.patch.object(provider, 'audit_info', self._mock_audit_info):
+ # get a token
+ body_dict = _build_user_auth(username='FOO', password='foo2')
+ unscoped_token = self.controller.authenticate(context, body_dict)
+ token_id = unscoped_token['access']['token']['id']
+ # get a second token
+ body_dict = _build_user_auth(
+ token=unscoped_token['access']['token'])
+ unscoped_token_2 = self.controller.authenticate(context, body_dict)
+ token_2_id = unscoped_token_2['access']['token']['id']
+
+ self.token_provider_api.revoke_token(token_id, revoke_chain=True)
+
+ revoke_events = self.revoke_api.list_events()
+ self.assertThat(revoke_events, matchers.HasLength(1))
+ revoke_event = revoke_events[0].to_dict()
+ self.assertIn('expires_at', revoke_event)
+ self.assertEqual(unscoped_token_2['access']['token']['expires'],
+ revoke_event['expires_at'])
+
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_id)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_2_id)
+
+ # get a new token, with no audit info
+ body_dict = _build_user_auth(username='FOO', password='foo2')
+ unscoped_token = self.controller.authenticate(context, body_dict)
+ token_id = unscoped_token['access']['token']['id']
+ # get a second token
+ body_dict = _build_user_auth(
+ token=unscoped_token['access']['token'])
+ unscoped_token_2 = self.controller.authenticate(context, body_dict)
+ token_2_id = unscoped_token_2['access']['token']['id']
+
+ # Revoke by audit_id, no audit_info means both parent and child
+ # token are revoked.
+ self.token_provider_api.revoke_token(token_id)
+
+ revoke_events = self.revoke_api.list_events()
+ self.assertThat(revoke_events, matchers.HasLength(2))
+ revoke_event = revoke_events[1].to_dict()
+ self.assertIn('expires_at', revoke_event)
+ self.assertEqual(unscoped_token_2['access']['token']['expires'],
+ revoke_event['expires_at'])
+
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_id)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_2_id)
+
+
+class AuthWithPasswordCredentials(AuthTest):
+ def test_auth_invalid_user(self):
+ """Verify exception is raised if invalid user."""
+ body_dict = _build_user_auth(
+ username=uuid.uuid4().hex,
+ password=uuid.uuid4().hex)
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_valid_user_invalid_password(self):
+ """Verify exception is raised if invalid password."""
+ body_dict = _build_user_auth(
+ username="FOO",
+ password=uuid.uuid4().hex)
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_empty_password(self):
+ """Verify exception is raised if empty password."""
+ body_dict = _build_user_auth(
+ username="FOO",
+ password="")
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_no_password(self):
+ """Verify exception is raised if empty password."""
+ body_dict = _build_user_auth(username="FOO")
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_blank_password_credentials(self):
+ """Sending empty dict as passwordCredentials raises a 400 error."""
+ body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'}
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_no_username(self):
+ """Verify skipping username raises the right exception."""
+ body_dict = _build_user_auth(password="pass",
+ tenant_name="demo")
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_bind_without_remote_user(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ body_dict = _build_user_auth(username='FOO', password='foo2',
+ tenant_name='BAR')
+ token = self.controller.authenticate({}, body_dict)
+ self.assertNotIn('bind', token['access']['token'])
+
+ def test_change_default_domain_id(self):
+ # If the default_domain_id config option is not the default then the
+ # user in auth data is from the new default domain.
+
+ # 1) Create a new domain.
+ new_domain_id = uuid.uuid4().hex
+ new_domain = {
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'id': new_domain_id,
+ 'name': uuid.uuid4().hex,
+ }
+
+ self.resource_api.create_domain(new_domain_id, new_domain)
+
+ # 2) Create user "foo" in new domain with different password than
+ # default-domain foo.
+ new_user_password = uuid.uuid4().hex
+ new_user = {
+ 'name': self.user_foo['name'],
+ 'domain_id': new_domain_id,
+ 'password': new_user_password,
+ 'email': 'foo@bar2.com',
+ }
+
+ new_user = self.identity_api.create_user(new_user)
+
+ # 3) Update the default_domain_id config option to the new domain
+
+ self.config_fixture.config(group='identity',
+ default_domain_id=new_domain_id)
+
+ # 4) Authenticate as "foo" using the password in the new domain.
+
+ body_dict = _build_user_auth(
+ username=self.user_foo['name'],
+ password=new_user_password)
+
+ # The test is successful if this doesn't raise, so no need to assert.
+ self.controller.authenticate({}, body_dict)
+
+
+class AuthWithRemoteUser(AuthTest):
+ def test_unscoped_remote_authn(self):
+ """Verify getting an unscoped token with external authn."""
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2')
+ local_token = self.controller.authenticate(
+ {}, body_dict)
+
+ body_dict = _build_user_auth()
+ remote_token = self.controller.authenticate(
+ self.context_with_remote_user, body_dict)
+
+ self.assertEqualTokens(local_token, remote_token,
+ enforce_audit_ids=False)
+
+ def test_unscoped_remote_authn_jsonless(self):
+ """Verify that external auth with invalid request fails."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller.authenticate,
+ {'REMOTE_USER': 'FOO'},
+ None)
+
+ def test_scoped_remote_authn(self):
+ """Verify getting a token with external authn."""
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name='BAR')
+ local_token = self.controller.authenticate(
+ {}, body_dict)
+
+ body_dict = _build_user_auth(
+ tenant_name='BAR')
+ remote_token = self.controller.authenticate(
+ self.context_with_remote_user, body_dict)
+
+ self.assertEqualTokens(local_token, remote_token,
+ enforce_audit_ids=False)
+
+ def test_scoped_nometa_remote_authn(self):
+ """Verify getting a token with external authn and no metadata."""
+ body_dict = _build_user_auth(
+ username='TWO',
+ password='two2',
+ tenant_name='BAZ')
+ local_token = self.controller.authenticate(
+ {}, body_dict)
+
+ body_dict = _build_user_auth(tenant_name='BAZ')
+ remote_token = self.controller.authenticate(
+ {'environment': {'REMOTE_USER': 'TWO'}}, body_dict)
+
+ self.assertEqualTokens(local_token, remote_token,
+ enforce_audit_ids=False)
+
+ def test_scoped_remote_authn_invalid_user(self):
+ """Verify that external auth with invalid user fails."""
+ body_dict = _build_user_auth(tenant_name="BAR")
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {'environment': {'REMOTE_USER': uuid.uuid4().hex}},
+ body_dict)
+
+ def test_bind_with_kerberos(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ body_dict = _build_user_auth(tenant_name="BAR")
+ token = self.controller.authenticate(self.context_with_remote_user,
+ body_dict)
+ self.assertEqual('FOO', token['access']['token']['bind']['kerberos'])
+
+ def test_bind_without_config_opt(self):
+ self.config_fixture.config(group='token', bind=['x509'])
+ body_dict = _build_user_auth(tenant_name='BAR')
+ token = self.controller.authenticate(self.context_with_remote_user,
+ body_dict)
+ self.assertNotIn('bind', token['access']['token'])
+
+
+class AuthWithTrust(AuthTest):
+ def setUp(self):
+ super(AuthWithTrust, self).setUp()
+
+ self.trust_controller = trust.controllers.TrustV3()
+ self.auth_v3_controller = auth.controllers.Auth()
+ self.trustor = self.user_foo
+ self.trustee = self.user_two
+ self.assigned_roles = [self.role_member['id'],
+ self.role_browser['id']]
+ for assigned_role in self.assigned_roles:
+ self.assignment_api.add_role_to_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+
+ self.sample_data = {'trustor_user_id': self.trustor['id'],
+ 'trustee_user_id': self.trustee['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'impersonation': True,
+ 'roles': [{'id': self.role_browser['id']},
+ {'name': self.role_member['name']}]}
+
+ def config_overrides(self):
+ super(AuthWithTrust, self).config_overrides()
+ self.config_fixture.config(group='trust', enabled=True)
+
+ def _create_auth_context(self, token_id):
+ token_ref = token_model.KeystoneToken(
+ token_id=token_id,
+ token_data=self.token_provider_api.validate_token(token_id))
+ auth_context = authorization.token_to_auth_context(token_ref)
+ return {'environment': {authorization.AUTH_CONTEXT_ENV: auth_context},
+ 'token_id': token_id,
+ 'host_url': HOST_URL}
+
+ def create_trust(self, trust_data, trustor_name, expires_at=None,
+ impersonation=True):
+ username = trustor_name
+ password = 'foo2'
+ unscoped_token = self.get_unscoped_token(username, password)
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
+ trust_data_copy = copy.deepcopy(trust_data)
+ trust_data_copy['expires_at'] = expires_at
+ trust_data_copy['impersonation'] = impersonation
+
+ return self.trust_controller.create_trust(
+ context, trust=trust_data_copy)['trust']
+
+ def get_unscoped_token(self, username, password='foo2'):
+ body_dict = _build_user_auth(username=username, password=password)
+ return self.controller.authenticate({}, body_dict)
+
+ def build_v2_token_request(self, username, password, trust,
+ tenant_id=None):
+ if not tenant_id:
+ tenant_id = self.tenant_bar['id']
+ unscoped_token = self.get_unscoped_token(username, password)
+ unscoped_token_id = unscoped_token['access']['token']['id']
+ request_body = _build_user_auth(token={'id': unscoped_token_id},
+ trust_id=trust['id'],
+ tenant_id=tenant_id)
+ return request_body
+
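+    # Illustrative only: for made-up ids, build_v2_token_request() above
+    # assembles a request body shaped like
+    #   {'token': {'id': '<unscoped-token-id>'},
+    #    'trust_id': '<trust-id>',
+    #    'tenantId': '<tenant-id>'}
+    # via _build_user_auth().
+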
+ def test_create_trust_bad_data_fails(self):
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
+ bad_sample_data = {'trustor_user_id': self.trustor['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'roles': [{'id': self.role_browser['id']}]}
+
+ self.assertRaises(exception.ValidationError,
+ self.trust_controller.create_trust,
+ context, trust=bad_sample_data)
+
+ def test_create_trust_no_roles(self):
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ context = {'token_id': unscoped_token['access']['token']['id']}
+ self.sample_data['roles'] = []
+ self.assertRaises(exception.Forbidden,
+ self.trust_controller.create_trust,
+ context, trust=self.sample_data)
+
+ def test_create_trust(self):
+ expires_at = timeutils.strtime(timeutils.utcnow() +
+ datetime.timedelta(minutes=10),
+ fmt=TIME_FORMAT)
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'],
+ expires_at=expires_at)
+ self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
+ self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
+ role_ids = [self.role_browser['id'], self.role_member['id']]
+ self.assertTrue(timeutils.parse_strtime(new_trust['expires_at'],
+ fmt=TIME_FORMAT))
+ self.assertIn('%s/v3/OS-TRUST/' % HOST_URL,
+ new_trust['links']['self'])
+ self.assertIn('%s/v3/OS-TRUST/' % HOST_URL,
+ new_trust['roles_links']['self'])
+
+ for role in new_trust['roles']:
+ self.assertIn(role['id'], role_ids)
+
+ def test_create_trust_expires_bad(self):
+ self.assertRaises(exception.ValidationTimeStampError,
+ self.create_trust, self.sample_data,
+ self.trustor['name'], expires_at="bad")
+ self.assertRaises(exception.ValidationTimeStampError,
+ self.create_trust, self.sample_data,
+ self.trustor['name'], expires_at="")
+ self.assertRaises(exception.ValidationTimeStampError,
+ self.create_trust, self.sample_data,
+ self.trustor['name'], expires_at="Z")
+
+ def test_create_trust_without_project_id(self):
+ """Verify that trust can be created without project id and
+ token can be generated with that trust.
+ """
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
+ self.sample_data['project_id'] = None
+ self.sample_data['roles'] = []
+ new_trust = self.trust_controller.create_trust(
+ context, trust=self.sample_data)['trust']
+ self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
+ self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
+ self.assertIs(new_trust['impersonation'], True)
+ auth_response = self.fetch_v2_token_from_trust(new_trust)
+ token_user = auth_response['access']['user']
+ self.assertEqual(token_user['id'], new_trust['trustor_user_id'])
+
+ def test_get_trust(self):
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ context = {'token_id': unscoped_token['access']['token']['id'],
+ 'host_url': HOST_URL}
+ new_trust = self.trust_controller.create_trust(
+ context, trust=self.sample_data)['trust']
+ trust = self.trust_controller.get_trust(context,
+ new_trust['id'])['trust']
+ self.assertEqual(self.trustor['id'], trust['trustor_user_id'])
+ self.assertEqual(self.trustee['id'], trust['trustee_user_id'])
+ role_ids = [self.role_browser['id'], self.role_member['id']]
+ for role in new_trust['roles']:
+ self.assertIn(role['id'], role_ids)
+
+ def test_create_trust_no_impersonation(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'],
+ expires_at=None, impersonation=False)
+ self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
+ self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
+ self.assertIs(new_trust['impersonation'], False)
+ auth_response = self.fetch_v2_token_from_trust(new_trust)
+ token_user = auth_response['access']['user']
+ self.assertEqual(token_user['id'], new_trust['trustee_user_id'])
+
+ # TODO(ayoung): Endpoints
+
+ def test_create_trust_impersonation(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
+ self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
+ self.assertIs(new_trust['impersonation'], True)
+ auth_response = self.fetch_v2_token_from_trust(new_trust)
+ token_user = auth_response['access']['user']
+ self.assertEqual(token_user['id'], new_trust['trustor_user_id'])
+
+ def test_token_from_trust_wrong_user_fails(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ request_body = self.build_v2_token_request('FOO', 'foo2', new_trust)
+ self.assertRaises(exception.Forbidden, self.controller.authenticate,
+ {}, request_body)
+
+ def test_token_from_trust_wrong_project_fails(self):
+ for assigned_role in self.assigned_roles:
+ self.assignment_api.add_role_to_user_and_project(
+ self.trustor['id'], self.tenant_baz['id'], assigned_role)
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ request_body = self.build_v2_token_request('TWO', 'two2', new_trust,
+ self.tenant_baz['id'])
+ self.assertRaises(exception.Forbidden, self.controller.authenticate,
+ {}, request_body)
+
+ def fetch_v2_token_from_trust(self, trust):
+ request_body = self.build_v2_token_request('TWO', 'two2', trust)
+ auth_response = self.controller.authenticate({}, request_body)
+ return auth_response
+
+ def fetch_v3_token_from_trust(self, trust, trustee):
+ v3_password_data = {
+ 'identity': {
+ "methods": ["password"],
+ "password": {
+ "user": {
+ "id": trustee["id"],
+ "password": trustee["password"]
+ }
+ }
+ },
+ 'scope': {
+ 'project': {
+ 'id': self.tenant_baz['id']
+ }
+ }
+ }
+        auth_response = self.auth_v3_controller.authenticate_for_token(
+            {'environment': {}, 'query_string': {}}, v3_password_data)
+ token = auth_response.headers['X-Subject-Token']
+
+ v3_req_with_trust = {
+ "identity": {
+ "methods": ["token"],
+ "token": {"id": token}},
+ "scope": {
+ "OS-TRUST:trust": {"id": trust['id']}}}
+        token_auth_response = self.auth_v3_controller.authenticate_for_token(
+            {'environment': {}, 'query_string': {}}, v3_req_with_trust)
+ return token_auth_response
+
+ def test_create_v3_token_from_trust(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee)
+
+ trust_token_user = auth_response.json['token']['user']
+ self.assertEqual(self.trustor['id'], trust_token_user['id'])
+
+ trust_token_trust = auth_response.json['token']['OS-TRUST:trust']
+ self.assertEqual(trust_token_trust['id'], new_trust['id'])
+ self.assertEqual(self.trustor['id'],
+ trust_token_trust['trustor_user']['id'])
+ self.assertEqual(self.trustee['id'],
+ trust_token_trust['trustee_user']['id'])
+
+ trust_token_roles = auth_response.json['token']['roles']
+ self.assertEqual(2, len(trust_token_roles))
+
+ def test_v3_trust_token_get_token_fails(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee)
+ trust_token = auth_response.headers['X-Subject-Token']
+ v3_token_data = {'identity': {
+ 'methods': ['token'],
+ 'token': {'id': trust_token}
+ }}
+ self.assertRaises(
+ exception.Forbidden,
+ self.auth_v3_controller.authenticate_for_token,
+ {'environment': {},
+ 'query_string': {}}, v3_token_data)
+
+ def test_token_from_trust(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ auth_response = self.fetch_v2_token_from_trust(new_trust)
+
+ self.assertIsNotNone(auth_response)
+ self.assertEqual(2,
+ len(auth_response['access']['metadata']['roles']),
+ "user_foo has three roles, but the token should"
+ " only get the two roles specified in the trust.")
+
+ def assert_token_count_for_trust(self, trust, expected_value):
+ tokens = self.token_provider_api._persistence._list_tokens(
+ self.trustee['id'], trust_id=trust['id'])
+ token_count = len(tokens)
+ self.assertEqual(expected_value, token_count)
+
+ def test_delete_tokens_for_user_invalidates_tokens_from_trust(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ self.assert_token_count_for_trust(new_trust, 0)
+ self.fetch_v2_token_from_trust(new_trust)
+ self.assert_token_count_for_trust(new_trust, 1)
+ self.token_provider_api._persistence.delete_tokens_for_user(
+ self.trustee['id'])
+ self.assert_token_count_for_trust(new_trust, 0)
+
+ def test_token_from_trust_cant_get_another_token(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ auth_response = self.fetch_v2_token_from_trust(new_trust)
+ trust_token_id = auth_response['access']['token']['id']
+ request_body = _build_user_auth(token={'id': trust_token_id},
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_delete_trust_revokes_token(self):
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
+ self.fetch_v2_token_from_trust(new_trust)
+ trust_id = new_trust['id']
+ tokens = self.token_provider_api._persistence._list_tokens(
+ self.trustor['id'],
+ trust_id=trust_id)
+ self.assertEqual(1, len(tokens))
+ self.trust_controller.delete_trust(context, trust_id=trust_id)
+ tokens = self.token_provider_api._persistence._list_tokens(
+ self.trustor['id'],
+ trust_id=trust_id)
+ self.assertEqual(0, len(tokens))
+
+ def test_token_from_trust_with_no_role_fails(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ for assigned_role in self.assigned_roles:
+ self.assignment_api.remove_role_from_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+ request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_expired_trust_get_token_fails(self):
+ expiry = "1999-02-18T10:10:00Z"
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'],
+ expiry)
+ request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_token_from_trust_with_wrong_role_fails(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ self.assignment_api.add_role_to_user_and_project(
+ self.trustor['id'],
+ self.tenant_bar['id'],
+ self.role_other['id'])
+ for assigned_role in self.assigned_roles:
+ self.assignment_api.remove_role_from_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+
+ request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
+
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_do_not_consume_remaining_uses_when_get_token_fails(self):
+ trust_data = copy.deepcopy(self.sample_data)
+ trust_data['remaining_uses'] = 3
+ new_trust = self.create_trust(trust_data, self.trustor['name'])
+
+ for assigned_role in self.assigned_roles:
+ self.assignment_api.remove_role_from_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+
+ request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
+ self.assertRaises(exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
+ trust = self.trust_controller.get_trust(context,
+ new_trust['id'])['trust']
+ self.assertEqual(3, trust['remaining_uses'])
+
+ def test_v2_trust_token_contains_trustor_user_id_and_impersonation(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ auth_response = self.fetch_v2_token_from_trust(new_trust)
+
+ self.assertEqual(new_trust['trustee_user_id'],
+ auth_response['access']['trust']['trustee_user_id'])
+ self.assertEqual(new_trust['trustor_user_id'],
+ auth_response['access']['trust']['trustor_user_id'])
+ self.assertEqual(new_trust['impersonation'],
+ auth_response['access']['trust']['impersonation'])
+ self.assertEqual(new_trust['id'],
+ auth_response['access']['trust']['id'])
+
+ validate_response = self.controller.validate_token(
+ context=dict(is_admin=True, query_string={}),
+ token_id=auth_response['access']['token']['id'])
+ self.assertEqual(
+ new_trust['trustee_user_id'],
+ validate_response['access']['trust']['trustee_user_id'])
+ self.assertEqual(
+ new_trust['trustor_user_id'],
+ validate_response['access']['trust']['trustor_user_id'])
+ self.assertEqual(
+ new_trust['impersonation'],
+ validate_response['access']['trust']['impersonation'])
+ self.assertEqual(
+ new_trust['id'],
+ validate_response['access']['trust']['id'])
+
+ def disable_user(self, user):
+ user['enabled'] = False
+ self.identity_api.update_user(user['id'], user)
+
+ def test_trust_get_token_fails_if_trustor_disabled(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ request_body = self.build_v2_token_request(self.trustee['name'],
+ self.trustee['password'],
+ new_trust)
+ self.disable_user(self.trustor)
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_trust_get_token_fails_if_trustee_disabled(self):
+ new_trust = self.create_trust(self.sample_data, self.trustor['name'])
+ request_body = self.build_v2_token_request(self.trustee['name'],
+ self.trustee['password'],
+ new_trust)
+ self.disable_user(self.trustee)
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate, {}, request_body)
+
+
+class TokenExpirationTest(AuthTest):
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def _maintain_token_expiration(self, mock_utcnow):
+ """Token expiration should be maintained after re-auth & validation."""
+ now = datetime.datetime.utcnow()
+ mock_utcnow.return_value = now
+
+ r = self.controller.authenticate(
+ {},
+ auth={
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password']
+ }
+ })
+ unscoped_token_id = r['access']['token']['id']
+ original_expiration = r['access']['token']['expires']
+
+ mock_utcnow.return_value = now + datetime.timedelta(seconds=1)
+
+ r = self.controller.validate_token(
+ dict(is_admin=True, query_string={}),
+ token_id=unscoped_token_id)
+ self.assertEqual(original_expiration, r['access']['token']['expires'])
+
+ mock_utcnow.return_value = now + datetime.timedelta(seconds=2)
+
+ r = self.controller.authenticate(
+ {},
+ auth={
+ 'token': {
+ 'id': unscoped_token_id,
+ },
+ 'tenantId': self.tenant_bar['id'],
+ })
+ scoped_token_id = r['access']['token']['id']
+ self.assertEqual(original_expiration, r['access']['token']['expires'])
+
+ mock_utcnow.return_value = now + datetime.timedelta(seconds=3)
+
+ r = self.controller.validate_token(
+ dict(is_admin=True, query_string={}),
+ token_id=scoped_token_id)
+ self.assertEqual(original_expiration, r['access']['token']['expires'])
+
+ def test_maintain_uuid_token_expiration(self):
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.uuid.Provider')
+ self._maintain_token_expiration()
+
+
+class AuthCatalog(tests.SQLDriverOverrides, AuthTest):
+ """Tests for the catalog provided in the auth response."""
+
+ def config_files(self):
+ config_files = super(AuthCatalog, self).config_files()
+ # We need to use a backend that supports disabled endpoints, like the
+ # SQL backend.
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def _create_endpoints(self):
+ def create_region(**kwargs):
+ ref = {'id': uuid.uuid4().hex}
+ ref.update(kwargs)
+ self.catalog_api.create_region(ref)
+ return ref
+
+ def create_endpoint(service_id, region, **kwargs):
+ id_ = uuid.uuid4().hex
+ ref = {
+ 'id': id_,
+ 'interface': 'public',
+ 'region_id': region,
+ 'service_id': service_id,
+ 'url': 'http://localhost/%s' % uuid.uuid4().hex,
+ }
+ ref.update(kwargs)
+ self.catalog_api.create_endpoint(id_, ref)
+ return ref
+
+ # Create a service for use with the endpoints.
+ def create_service(**kwargs):
+ id_ = uuid.uuid4().hex
+ ref = {
+ 'id': id_,
+ 'name': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ }
+ ref.update(kwargs)
+ self.catalog_api.create_service(id_, ref)
+ return ref
+
+ enabled_service_ref = create_service(enabled=True)
+ disabled_service_ref = create_service(enabled=False)
+
+ region = create_region()
+
+ # Create endpoints
+ enabled_endpoint_ref = create_endpoint(
+ enabled_service_ref['id'], region['id'])
+ create_endpoint(
+ enabled_service_ref['id'], region['id'], enabled=False,
+ interface='internal')
+ create_endpoint(
+ disabled_service_ref['id'], region['id'])
+
+ return enabled_endpoint_ref
+
+ def test_auth_catalog_disabled_endpoint(self):
+ """On authenticate, get a catalog that excludes disabled endpoints."""
+ endpoint_ref = self._create_endpoints()
+
+ # Authenticate
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name="BAR")
+
+ token = self.controller.authenticate({}, body_dict)
+
+ # Check the catalog
+ self.assertEqual(1, len(token['access']['serviceCatalog']))
+ endpoint = token['access']['serviceCatalog'][0]['endpoints'][0]
+ self.assertEqual(
+ 1, len(token['access']['serviceCatalog'][0]['endpoints']))
+
+ exp_endpoint = {
+ 'id': endpoint_ref['id'],
+ 'publicURL': endpoint_ref['url'],
+ 'region': endpoint_ref['region_id'],
+ }
+
+ self.assertEqual(exp_endpoint, endpoint)
+
+ def test_validate_catalog_disabled_endpoint(self):
+ """On validate, get back a catalog that excludes disabled endpoints."""
+ endpoint_ref = self._create_endpoints()
+
+ # Authenticate
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name="BAR")
+
+ token = self.controller.authenticate({}, body_dict)
+
+ # Validate
+ token_id = token['access']['token']['id']
+ validate_ref = self.controller.validate_token(
+ dict(is_admin=True, query_string={}),
+ token_id=token_id)
+
+ # Check the catalog
+        self.assertEqual(1, len(validate_ref['access']['serviceCatalog']))
+        endpoint = validate_ref['access']['serviceCatalog'][0]['endpoints'][0]
+        self.assertEqual(
+            1, len(validate_ref['access']['serviceCatalog'][0]['endpoints']))
+
+ exp_endpoint = {
+ 'id': endpoint_ref['id'],
+ 'publicURL': endpoint_ref['url'],
+ 'region': endpoint_ref['region_id'],
+ }
+
+ self.assertEqual(exp_endpoint, endpoint)
+
+
+class NonDefaultAuthTest(tests.TestCase):
+
+ def test_add_non_default_auth_method(self):
+ self.config_fixture.config(group='auth',
+ methods=['password', 'token', 'custom'])
+ config.setup_authentication()
+ self.assertTrue(hasattr(CONF.auth, 'custom'))
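+
+# Rough keystone.conf equivalent of the override in the test above
+# (illustrative; the plugin path for 'custom' is a hypothetical
+# placeholder, not a real keystone plugin):
+#     [auth]
+#     methods = password,token,custom
+#     custom = keystone.auth.plugins.custom.Custom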
diff --git a/keystone-moon/keystone/tests/unit/test_auth_plugin.py b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
new file mode 100644
index 00000000..11df95a5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
@@ -0,0 +1,220 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import mock
+
+from keystone import auth
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+# for testing purposes only
+METHOD_NAME = 'simple_challenge_response'
+EXPECTED_RESPONSE = uuid.uuid4().hex
+DEMO_USER_ID = uuid.uuid4().hex
+
+
+class SimpleChallengeResponse(auth.AuthMethodHandler):
+
+ method = METHOD_NAME
+
+ def authenticate(self, context, auth_payload, user_context):
+ if 'response' in auth_payload:
+ if auth_payload['response'] != EXPECTED_RESPONSE:
+ raise exception.Unauthorized('Wrong answer')
+ user_context['user_id'] = DEMO_USER_ID
+ else:
+ return {"challenge": "What's the name of your high school?"}
+
+
+class DuplicateAuthPlugin(SimpleChallengeResponse):
+ """Duplicate simple challenge response auth plugin."""
+
+
+class MismatchedAuthPlugin(SimpleChallengeResponse):
+ method = uuid.uuid4().hex
+
+
+class NoMethodAuthPlugin(auth.AuthMethodHandler):
+ """An auth plugin that does not supply a method attribute."""
+ def authenticate(self, context, auth_payload, auth_context):
+ pass
+
+
+class TestAuthPlugin(tests.SQLDriverOverrides, tests.TestCase):
+ def setUp(self):
+ super(TestAuthPlugin, self).setUp()
+ self.load_backends()
+
+ self.api = auth.controllers.Auth()
+
+ def config_overrides(self):
+ super(TestAuthPlugin, self).config_overrides()
+ method_opts = {
+ 'external': 'keystone.auth.plugins.external.DefaultDomain',
+ 'password': 'keystone.auth.plugins.password.Password',
+ 'token': 'keystone.auth.plugins.token.Token',
+ METHOD_NAME:
+ 'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse',
+ }
+
+ self.auth_plugin_config_override(
+ methods=['external', 'password', 'token', METHOD_NAME],
+ **method_opts)
+
+ def test_unsupported_auth_method(self):
+ method_name = uuid.uuid4().hex
+ auth_data = {'methods': [method_name]}
+ auth_data[method_name] = {'test': 'test'}
+ auth_data = {'identity': auth_data}
+ self.assertRaises(exception.AuthMethodNotSupported,
+ auth.controllers.AuthInfo.create,
+ None,
+ auth_data)
+
+ def test_addition_auth_steps(self):
+ auth_data = {'methods': [METHOD_NAME]}
+ auth_data[METHOD_NAME] = {
+ 'test': 'test'}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo.create(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ try:
+ self.api.authenticate({'environment': {}}, auth_info, auth_context)
+ except exception.AdditionalAuthRequired as e:
+ self.assertIn('methods', e.authentication)
+ self.assertIn(METHOD_NAME, e.authentication['methods'])
+ self.assertIn(METHOD_NAME, e.authentication)
+ self.assertIn('challenge', e.authentication[METHOD_NAME])
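+            # e.authentication is expected to look roughly like:
+            #     {'methods': ['simple_challenge_response'],
+            #      'simple_challenge_response': {'challenge': ...}}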
+
+ # test correct response
+ auth_data = {'methods': [METHOD_NAME]}
+ auth_data[METHOD_NAME] = {
+ 'response': EXPECTED_RESPONSE}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo.create(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.api.authenticate({'environment': {}}, auth_info, auth_context)
+ self.assertEqual(DEMO_USER_ID, auth_context['user_id'])
+
+ # test incorrect response
+ auth_data = {'methods': [METHOD_NAME]}
+ auth_data[METHOD_NAME] = {
+ 'response': uuid.uuid4().hex}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo.create(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ self.api.authenticate,
+ {'environment': {}},
+ auth_info,
+ auth_context)
+
+
+class TestAuthPluginDynamicOptions(TestAuthPlugin):
+ def config_overrides(self):
+ super(TestAuthPluginDynamicOptions, self).config_overrides()
+ # Clear the override for the [auth] ``methods`` option so it is
+ # possible to load the options from the config file.
+ self.config_fixture.conf.clear_override('methods', group='auth')
+
+ def config_files(self):
+ config_files = super(TestAuthPluginDynamicOptions, self).config_files()
+ config_files.append(tests.dirs.tests_conf('test_auth_plugin.conf'))
+ return config_files
+
+
+class TestInvalidAuthMethodRegistration(tests.TestCase):
+ def test_duplicate_auth_method_registration(self):
+ self.config_fixture.config(
+ group='auth',
+ methods=[
+ 'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse',
+ 'keystone.tests.unit.test_auth_plugin.DuplicateAuthPlugin'])
+ self.clear_auth_plugin_registry()
+ self.assertRaises(ValueError, auth.controllers.load_auth_methods)
+
+ def test_no_method_attribute_auth_method_by_class_name_registration(self):
+ self.config_fixture.config(
+ group='auth',
+ methods=['keystone.tests.unit.test_auth_plugin.NoMethodAuthPlugin']
+ )
+ self.clear_auth_plugin_registry()
+ self.assertRaises(ValueError, auth.controllers.load_auth_methods)
+
+
+class TestMapped(tests.TestCase):
+ def setUp(self):
+ super(TestMapped, self).setUp()
+ self.load_backends()
+
+ self.api = auth.controllers.Auth()
+
+ def config_files(self):
+ config_files = super(TestMapped, self).config_files()
+ config_files.append(tests.dirs.tests_conf('test_auth_plugin.conf'))
+ return config_files
+
+ def config_overrides(self):
+ # don't override configs so we can use test_auth_plugin.conf only
+ pass
+
+ def _test_mapped_invocation_with_method_name(self, method_name):
+ with mock.patch.object(auth.plugins.mapped.Mapped,
+ 'authenticate',
+ return_value=None) as authenticate:
+ context = {'environment': {}}
+ auth_data = {
+ 'identity': {
+ 'methods': [method_name],
+ method_name: {'protocol': method_name},
+ }
+ }
+ auth_info = auth.controllers.AuthInfo.create(context, auth_data)
+ auth_context = {'extras': {},
+ 'method_names': [],
+ 'user_id': uuid.uuid4().hex}
+ self.api.authenticate(context, auth_info, auth_context)
+ # make sure Mapped plugin got invoked with the correct payload
+ ((context, auth_payload, auth_context),
+ kwargs) = authenticate.call_args
+ self.assertEqual(method_name, auth_payload['protocol'])
+
+ def test_mapped_with_remote_user(self):
+ with mock.patch.object(auth.plugins.mapped.Mapped,
+ 'authenticate',
+ return_value=None) as authenticate:
+            # the external plugin should fail and fall through to the
+            # mapped plugin
+ method_name = 'saml2'
+ auth_data = {'methods': [method_name]}
+            # put the method name in the payload so it's easier to correlate
+            # the method name with the payload
+ auth_data[method_name] = {'protocol': method_name}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo.create(None, auth_data)
+ auth_context = {'extras': {},
+ 'method_names': [],
+ 'user_id': uuid.uuid4().hex}
+ environment = {'environment': {'REMOTE_USER': 'foo@idp.com'}}
+ self.api.authenticate(environment, auth_info, auth_context)
+ # make sure Mapped plugin got invoked with the correct payload
+ ((context, auth_payload, auth_context),
+ kwargs) = authenticate.call_args
+            self.assertEqual(method_name, auth_payload['protocol'])
+
+ def test_supporting_multiple_methods(self):
+ for method_name in ['saml2', 'openid', 'x509']:
+ self._test_mapped_invocation_with_method_name(method_name)
diff --git a/keystone-moon/keystone/tests/unit/test_backend.py b/keystone-moon/keystone/tests/unit/test_backend.py
new file mode 100644
index 00000000..6cf06494
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend.py
@@ -0,0 +1,5741 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import datetime
+import hashlib
+import uuid
+
+from keystoneclient.common import cms
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+import six
+from testtools import matchers
+
+from keystone.catalog import core
+from keystone.common import driver_hints
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import filtering
+from keystone.tests.unit import utils as test_utils
+from keystone.token import provider
+
+
+CONF = cfg.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+NULL_OBJECT = object()
+
+
+class IdentityTests(object):
+ def _get_domain_fixture(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ return domain
+
+ def _set_domain_scope(self, domain_id):
+ # We only provide a domain scope if we have multiple drivers
+ if CONF.identity.domain_specific_drivers_enabled:
+ return domain_id
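+
+    # Sketch of intended usage (assuming the identity_api calls of this
+    # era accept a domain_scope keyword, as they do for domain-specific
+    # backends):
+    #     self.identity_api.list_users(
+    #         domain_scope=self._set_domain_scope(domain_id))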
+
+ def test_project_add_and_remove_user_role(self):
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], user_ids)
+
+ self.assignment_api.add_role_to_user_and_project(
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_bar['id'])
+ self.assertIn(self.user_two['id'], user_ids)
+
+ self.assignment_api.remove_role_from_user_and_project(
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], user_ids)
+
+ def test_remove_user_role_not_assigned(self):
+        # Expect failure when attempting to remove a role that was never
+        # assigned to the user.
+ self.assertRaises(exception.RoleNotFound,
+ self.assignment_api.
+ remove_role_from_user_and_project,
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+
+ def test_authenticate_bad_user(self):
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=uuid.uuid4().hex,
+ password=self.user_foo['password'])
+
+ def test_authenticate_bad_password(self):
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=self.user_foo['id'],
+ password=uuid.uuid4().hex)
+
+ def test_authenticate(self):
+ user_ref = self.identity_api.authenticate(
+ context={},
+ user_id=self.user_sna['id'],
+ password=self.user_sna['password'])
+ # NOTE(termie): the password field is left in user_sna to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_sna.pop('password')
+ self.user_sna['enabled'] = True
+ self.assertDictEqual(user_ref, self.user_sna)
+
+ def test_authenticate_and_get_roles_no_metadata(self):
+ user = {
+ 'name': 'NO_META',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'no_meta2',
+ }
+ new_user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ new_user['id'])
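+        # add_user_to_project implicitly grants the default member role
+        # (CONF.member_role_id) - the role assertions below rely on that.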
+ user_ref = self.identity_api.authenticate(
+ context={},
+ user_id=new_user['id'],
+ password=user['password'])
+ self.assertNotIn('password', user_ref)
+        # NOTE(termie): the password field is left in the user dict to make
+        #               it easier to authenticate in tests, but should
+        #               not be returned by the api
+ user.pop('password')
+ self.assertDictContainsSubset(user, user_ref)
+ role_list = self.assignment_api.get_roles_for_user_and_project(
+ new_user['id'], self.tenant_baz['id'])
+ self.assertEqual(1, len(role_list))
+ self.assertIn(CONF.member_role_id, role_list)
+
+ def test_authenticate_if_no_password_set(self):
+        user = {
+            'name': uuid.uuid4().hex,
+            'domain_id': DEFAULT_DOMAIN_ID,
+        }
+        # Authenticate as the user we actually created (which has no
+        # password set), rather than a random, nonexistent user id.
+        user = self.identity_api.create_user(user)
+
+        self.assertRaises(AssertionError,
+                          self.identity_api.authenticate,
+                          context={},
+                          user_id=user['id'],
+                          password='password')
+
+ def test_create_unicode_user_name(self):
+ unicode_name = u'name \u540d\u5b57'
+ user = {'name': unicode_name,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ ref = self.identity_api.create_user(user)
+ self.assertEqual(unicode_name, ref['name'])
+
+ def test_get_project(self):
+ tenant_ref = self.resource_api.get_project(self.tenant_bar['id'])
+ self.assertDictEqual(tenant_ref, self.tenant_bar)
+
+ def test_get_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ uuid.uuid4().hex)
+
+ def test_get_project_by_name(self):
+ tenant_ref = self.resource_api.get_project_by_name(
+ self.tenant_bar['name'],
+ DEFAULT_DOMAIN_ID)
+ self.assertDictEqual(tenant_ref, self.tenant_bar)
+
+ def test_get_project_by_name_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project_by_name,
+ uuid.uuid4().hex,
+ DEFAULT_DOMAIN_ID)
+
+ def test_list_user_ids_for_project(self):
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_baz['id'])
+ self.assertEqual(2, len(user_ids))
+ self.assertIn(self.user_two['id'], user_ids)
+ self.assertIn(self.user_badguy['id'], user_ids)
+
+ def test_list_user_ids_for_project_no_duplicates(self):
+ # Create user
+ user_ref = {
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ user_ref = self.identity_api.create_user(user_ref)
+ # Create project
+ project_ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(
+ project_ref['id'], project_ref)
+ # Create 2 roles and give user each role in project
+ for i in range(2):
+ role_ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_ref['id'], role_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user_ref['id'],
+ tenant_id=project_ref['id'],
+ role_id=role_ref['id'])
+ # Get the list of user_ids in project
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ project_ref['id'])
+ # Ensure the user is only returned once
+ self.assertEqual(1, len(user_ids))
+
+ def test_get_project_user_ids_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.list_user_ids_for_project,
+ uuid.uuid4().hex)
+
+ def test_get_user(self):
+ user_ref = self.identity_api.get_user(self.user_foo['id'])
+ # NOTE(termie): the password field is left in user_foo to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ @tests.skip_if_cache_disabled('identity')
+ def test_cache_layer_get_user(self):
+ user = {
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+ self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ # cache the result.
+ self.identity_api.get_user(ref['id'])
+ # delete bypassing identity api
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(ref['id']))
+ driver.delete_user(entity_id)
+
+ self.assertDictEqual(ref, self.identity_api.get_user(ref['id']))
+ self.identity_api.get_user.invalidate(self.identity_api, ref['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user, ref['id'])
+ user = {
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+ self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ user['description'] = uuid.uuid4().hex
+ # cache the result.
+ self.identity_api.get_user(ref['id'])
+ # update using identity api and get back updated user.
+ user_updated = self.identity_api.update_user(ref['id'], user)
+ self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
+ user_updated)
+ self.assertDictContainsSubset(
+ self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
+ user_updated)
+
+ def test_get_user_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ uuid.uuid4().hex)
+
+ def test_get_user_by_name(self):
+ user_ref = self.identity_api.get_user_by_name(
+ self.user_foo['name'], DEFAULT_DOMAIN_ID)
+ # NOTE(termie): the password field is left in user_foo to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ @tests.skip_if_cache_disabled('identity')
+ def test_cache_layer_get_user_by_name(self):
+ user = {
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+ self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ # delete bypassing the identity api.
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(ref['id']))
+ driver.delete_user(entity_id)
+
+ self.assertDictEqual(ref, self.identity_api.get_user_by_name(
+ user['name'], DEFAULT_DOMAIN_ID))
+ self.identity_api.get_user_by_name.invalidate(
+ self.identity_api, user['name'], DEFAULT_DOMAIN_ID)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ user['name'], DEFAULT_DOMAIN_ID)
+ user = {
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+ self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ user['description'] = uuid.uuid4().hex
+ user_updated = self.identity_api.update_user(ref['id'], user)
+ self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
+ user_updated)
+ self.assertDictContainsSubset(
+ self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
+ user_updated)
+
+ def test_get_user_by_name_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ uuid.uuid4().hex,
+ DEFAULT_DOMAIN_ID)
+
+ def test_create_duplicate_user_name_fails(self):
+ user = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ user = self.identity_api.create_user(user)
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_duplicate_user_name_in_different_domains(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ user1 = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ user2 = {'name': user1['name'],
+ 'domain_id': new_domain['id'],
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user1)
+ self.identity_api.create_user(user2)
+
+ def test_move_user_between_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex}
+ user = self.identity_api.create_user(user)
+ user['domain_id'] = domain2['id']
+ self.identity_api.update_user(user['id'], user)
+
+ def test_move_user_between_domains_with_clashing_names_fails(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ # First, create a user in domain1
+ user1 = {'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex}
+ user1 = self.identity_api.create_user(user1)
+ # Now create a user in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ user2 = {'name': user1['name'],
+ 'domain_id': domain2['id'],
+ 'password': uuid.uuid4().hex}
+ user2 = self.identity_api.create_user(user2)
+        # Now try to move user1 into the 2nd domain - which should
+ # fail since the names clash
+ user1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_user,
+ user1['id'],
+ user1)
+
+ def test_rename_duplicate_user_name_fails(self):
+ user1 = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ user2 = {'name': 'fake2',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ self.identity_api.create_user(user1)
+ user2 = self.identity_api.create_user(user2)
+ user2['name'] = 'fake1'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_user,
+ user2['id'],
+ user2)
+
+ def test_update_user_id_fails(self):
+ user = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ user = self.identity_api.create_user(user)
+ original_id = user['id']
+ user['id'] = 'fake2'
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ original_id,
+ user)
+ user_ref = self.identity_api.get_user(original_id)
+ self.assertEqual(original_id, user_ref['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'fake2')
+
+ def test_create_duplicate_project_id_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant['name'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.resource_api.create_project,
+ 'fake1',
+ tenant)
+
+ def test_create_duplicate_project_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.resource_api.create_project,
+ 'fake1',
+ tenant)
+
+ def test_create_duplicate_project_name_in_different_domains(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ tenant1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ tenant2 = {'id': uuid.uuid4().hex, 'name': tenant1['name'],
+ 'domain_id': new_domain['id']}
+ self.resource_api.create_project(tenant1['id'], tenant1)
+ self.resource_api.create_project(tenant2['id'], tenant2)
+
+ def test_move_project_between_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project['id'], project)
+ project['domain_id'] = domain2['id']
+ self.resource_api.update_project(project['id'], project)
+
+ def test_move_project_between_domains_with_clashing_names_fails(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ # First, create a project in domain1
+ project1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ # Now create a project in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ project2 = {'id': uuid.uuid4().hex,
+ 'name': project1['name'],
+ 'domain_id': domain2['id']}
+ self.resource_api.create_project(project2['id'], project2)
+        # Now try to move project1 into the 2nd domain - which should
+ # fail since the names clash
+ project1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.resource_api.update_project,
+ project1['id'],
+ project1)
+
+ def test_rename_duplicate_project_name_fails(self):
+ tenant1 = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ tenant2 = {'id': 'fake2', 'name': 'fake2',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant1)
+ self.resource_api.create_project('fake2', tenant2)
+ tenant2['name'] = 'fake1'
+ self.assertRaises(exception.Error,
+ self.resource_api.update_project,
+ 'fake2',
+ tenant2)
+
+ def test_update_project_id_does_nothing(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant['id'] = 'fake2'
+ self.resource_api.update_project('fake1', tenant)
+ tenant_ref = self.resource_api.get_project('fake1')
+ self.assertEqual('fake1', tenant_ref['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ 'fake2')
+
+ def test_list_role_assignments_unfiltered(self):
+ """Test for unfiltered listing role assignments.
+
+ Test Plan:
+
+ - Create a domain, with a user, group & project
+ - Find how many role assignments already exist (from default
+ fixtures)
+ - Create a grant of each type (user/group on project/domain)
+ - Check the number of assignments has gone up by 4 and that
+ the entries we added are in the list returned
+ - Check that if we list assignments by role_id, then we get back
+ assignments that only contain that role.
+
+ """
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain['id']}
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ # First check how many role grants already exist
+ existing_assignments = len(self.assignment_api.list_role_assignments())
+ existing_assignments_for_role = len(
+ self.assignment_api.list_role_assignments_for_role(
+ role_id='admin'))
+
+ # Now create the grants (roles are defined in default_fixtures)
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ project_id=new_project['id'],
+ role_id='other')
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='admin')
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ project_id=new_project['id'],
+ role_id='admin')
+
+        # Read back the full list of assignments - check it has gone up by 4
+ assignment_list = self.assignment_api.list_role_assignments()
+ self.assertEqual(existing_assignments + 4, len(assignment_list))
+
+        # Now check that each of our four new entries is in the list
+ self.assertIn(
+ {'user_id': new_user['id'], 'domain_id': new_domain['id'],
+ 'role_id': 'member'},
+ assignment_list)
+ self.assertIn(
+ {'user_id': new_user['id'], 'project_id': new_project['id'],
+ 'role_id': 'other'},
+ assignment_list)
+ self.assertIn(
+ {'group_id': new_group['id'], 'domain_id': new_domain['id'],
+ 'role_id': 'admin'},
+ assignment_list)
+ self.assertIn(
+ {'group_id': new_group['id'], 'project_id': new_project['id'],
+ 'role_id': 'admin'},
+ assignment_list)
+
+ # Read back the list of assignments for just the admin role, checking
+ # this only goes up by two.
+ assignment_list = self.assignment_api.list_role_assignments_for_role(
+ role_id='admin')
+ self.assertEqual(existing_assignments_for_role + 2,
+ len(assignment_list))
+
+        # Now check that each of our two new entries is in the list
+ self.assertIn(
+ {'group_id': new_group['id'], 'domain_id': new_domain['id'],
+ 'role_id': 'admin'},
+ assignment_list)
+ self.assertIn(
+ {'group_id': new_group['id'], 'project_id': new_project['id'],
+ 'role_id': 'admin'},
+ assignment_list)
+
+ def test_list_group_role_assignment(self):
+ # When a group role assignment is created and the role assignments are
+ # listed then the group role assignment is included in the list.
+
+ MEMBER_ROLE_ID = 'member'
+
+ def get_member_assignments():
+ assignments = self.assignment_api.list_role_assignments()
+            # list() so the result can be concatenated and compared under
+            # Python 3, where filter() returns an iterator.
+            return list(filter(lambda x: x['role_id'] == MEMBER_ROLE_ID,
+                               assignments))
+
+ orig_member_assignments = get_member_assignments()
+
+ # Create a group.
+ new_group = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': self.getUniqueString(prefix='tlgra')}
+ new_group = self.identity_api.create_group(new_group)
+
+ # Create a project.
+ new_project = {
+ 'id': uuid.uuid4().hex,
+ 'name': self.getUniqueString(prefix='tlgra'),
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ # Assign a role to the group.
+ self.assignment_api.create_grant(
+ group_id=new_group['id'], project_id=new_project['id'],
+ role_id=MEMBER_ROLE_ID)
+
+ # List role assignments
+ new_member_assignments = get_member_assignments()
+
+ expected_member_assignments = orig_member_assignments + [{
+ 'group_id': new_group['id'], 'project_id': new_project['id'],
+ 'role_id': MEMBER_ROLE_ID}]
+ self.assertThat(new_member_assignments,
+ matchers.Equals(expected_member_assignments))
+
+ def test_list_role_assignments_bad_role(self):
+ assignment_list = self.assignment_api.list_role_assignments_for_role(
+ role_id=uuid.uuid4().hex)
+ self.assertEqual([], assignment_list)
+
+ def test_add_duplicate_role_grant(self):
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(self.role_admin['id'], roles_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
+ self.assertRaises(exception.Conflict,
+ self.assignment_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ def test_get_role_by_user_and_project_with_user_in_group(self):
+ """Test for get role by user and project, user was added into a group.
+
+ Test Plan:
+
+ - Create a user, a project & a group, add this user to group
+ - Create roles and grant them to user and project
+ - Check the role list get by the user and project was as expected
+
+ """
+ user_ref = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ user_ref = self.identity_api.create_user(user_ref)
+
+ project_ref = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(project_ref['id'], project_ref)
+
+ group = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ group_id = self.identity_api.create_group(group)['id']
+ self.identity_api.add_user_to_group(user_ref['id'], group_id)
+
+ role_ref_list = []
+ for i in range(2):
+ role_ref = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_ref['id'], role_ref)
+ role_ref_list.append(role_ref)
+
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user_ref['id'],
+ tenant_id=project_ref['id'],
+ role_id=role_ref['id'])
+
+ role_list = self.assignment_api.get_roles_for_user_and_project(
+ user_id=user_ref['id'],
+ tenant_id=project_ref['id'])
+
+ self.assertEqual(set(role_list),
+ set([r['id'] for r in role_ref_list]))
+
+ def test_get_role_by_user_and_project(self):
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(self.role_admin['id'], roles_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'], roles_ref)
+ self.assertNotIn('member', roles_ref)
+
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'], roles_ref)
+ self.assertIn('member', roles_ref)
+
+ def test_get_roles_for_user_and_domain(self):
+ """Test for getting roles for user on a domain.
+
+ Test Plan:
+
+ - Create a domain, with 2 users
+        - Check no roles yet exist
+        - Give user1 two roles on the domain, user2 one role
+        - Get roles on user1 and the domain - make sure we only
+          get back the 2 roles on user1
+ - Delete both roles from user1
+ - Check we get no roles back for user1 on domain
+
+ """
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user1 = self.identity_api.create_user(new_user1)
+ new_user2 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user2 = self.identity_api.create_user(new_user2)
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user1['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ # Now create the grants (roles are defined in default_fixtures)
+ self.assignment_api.create_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.assignment_api.create_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='other')
+ self.assignment_api.create_grant(user_id=new_user2['id'],
+ domain_id=new_domain['id'],
+ role_id='admin')
+ # Read back the roles for user1 on domain
+ roles_ids = self.assignment_api.get_roles_for_user_and_domain(
+ new_user1['id'], new_domain['id'])
+ self.assertEqual(2, len(roles_ids))
+ self.assertIn(self.role_member['id'], roles_ids)
+ self.assertIn(self.role_other['id'], roles_ids)
+
+ # Now delete both grants for user1
+ self.assignment_api.delete_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.assignment_api.delete_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='other')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user1['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ def test_get_roles_for_user_and_domain_404(self):
+ """Test errors raised when getting roles for user on a domain.
+
+ Test Plan:
+
+ - Check non-existing user gives UserNotFound
+ - Check non-existing domain gives DomainNotFound
+
+ """
+ new_domain = self._get_domain_fixture()
+ new_user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user1 = self.identity_api.create_user(new_user1)
+
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.get_roles_for_user_and_domain,
+ uuid.uuid4().hex,
+ new_domain['id'])
+
+ self.assertRaises(exception.DomainNotFound,
+ self.assignment_api.get_roles_for_user_and_domain,
+ new_user1['id'],
+ uuid.uuid4().hex)
+
+ def test_get_roles_for_user_and_project_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.get_roles_for_user_and_project,
+ uuid.uuid4().hex,
+ self.tenant_bar['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.get_roles_for_user_and_project,
+ self.user_foo['id'],
+ uuid.uuid4().hex)
+
+ def test_add_role_to_user_and_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ uuid.uuid4().hex,
+ self.role_admin['id'])
+
+ self.assertRaises(exception.RoleNotFound,
+ self.assignment_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ uuid.uuid4().hex)
+
+ def test_add_role_to_user_and_project_no_user(self):
+        # If add_role_to_user_and_project is called with a user that doesn't
+        # exist, no error is raised.
+ user_id_not_exist = uuid.uuid4().hex
+ self.assignment_api.add_role_to_user_and_project(
+ user_id_not_exist, self.tenant_bar['id'], self.role_admin['id'])
+
+ def test_remove_role_from_user_and_project(self):
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ self.assignment_api.remove_role_from_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn('member', roles_ref)
+ self.assertRaises(exception.NotFound,
+ self.assignment_api.
+ remove_role_from_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ 'member')
+
+ def test_get_role_grant_by_user_and_project(self):
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.assignment_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_admin['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'],
+ [role_ref['id'] for role_ref in roles_ref])
+
+ self.assignment_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'])
+
+ roles_ref_ids = []
+ for ref in roles_ref:
+ roles_ref_ids.append(ref['id'])
+ self.assertIn(self.role_admin['id'], roles_ref_ids)
+ self.assertIn('member', roles_ref_ids)
+
+ def test_remove_role_grant_from_user_and_project(self):
+ self.assignment_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ def test_get_role_assignment_by_project_not_found(self):
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.check_grant_role_id,
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.check_grant_role_id,
+ group_id=uuid.uuid4().hex,
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ def test_get_role_assignment_by_domain_not_found(self):
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.check_grant_role_id,
+ user_id=self.user_foo['id'],
+ domain_id=self.domain_default['id'],
+ role_id='member')
+
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.check_grant_role_id,
+ group_id=uuid.uuid4().hex,
+ domain_id=self.domain_default['id'],
+ role_id='member')
+
+ def test_del_role_assignment_by_project_not_found(self):
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=uuid.uuid4().hex,
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ def test_del_role_assignment_by_domain_not_found(self):
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ user_id=self.user_foo['id'],
+ domain_id=self.domain_default['id'],
+ role_id='member')
+
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=uuid.uuid4().hex,
+ domain_id=self.domain_default['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_project(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'password': 'secret',
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_domain(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_get_and_remove_correct_role_grant_from_a_mix(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain['id']}
+ self.resource_api.create_project(new_project['id'], new_project)
+ new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_group2 = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group2 = self.identity_api.create_group(new_group2)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ new_user2 = {'name': 'new_user2', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user2 = self.identity_api.create_user(new_user2)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ # First check we have no grants
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ # Now add the grant we are going to test for, and some others as
+ # well just to make sure we get back the right one
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ self.assignment_api.create_grant(group_id=new_group2['id'],
+ domain_id=new_domain['id'],
+ role_id=self.role_admin['id'])
+ self.assignment_api.create_grant(user_id=new_user2['id'],
+ domain_id=new_domain['id'],
+ role_id=self.role_admin['id'])
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ project_id=new_project['id'],
+ role_id=self.role_admin['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_user_and_domain(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_user = {'name': 'new_user', 'password': 'secret',
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
+ group1_domain1_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(group1_domain1_role['id'],
+ group1_domain1_role)
+ group1_domain2_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(group1_domain2_role['id'],
+ group1_domain2_role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ group1 = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
+ group1 = self.identity_api.create_group(group1)
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(0, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain2['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=group1_domain1_role['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=group1_domain2_role['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertDictEqual(roles_ref[0], group1_domain1_role)
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain2['id'])
+ self.assertDictEqual(roles_ref[0], group1_domain2_role)
+
+ self.assignment_api.delete_grant(group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=group1_domain2_role['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain2['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=group1_domain2_role['id'])
+
+ def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
+ user1_domain1_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(user1_domain1_role['id'], user1_domain1_role)
+ user1_domain2_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(user1_domain2_role['id'], user1_domain2_role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(0, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain2['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=user1_domain1_role['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain2['id'],
+ role_id=user1_domain2_role['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertDictEqual(roles_ref[0], user1_domain1_role)
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain2['id'])
+ self.assertDictEqual(roles_ref[0], user1_domain2_role)
+
+ self.assignment_api.delete_grant(user_id=user1['id'],
+ domain_id=domain2['id'],
+ role_id=user1_domain2_role['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain2['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ user_id=user1['id'],
+ domain_id=domain2['id'],
+ role_id=user1_domain2_role['id'])
+
+ def test_role_grant_by_group_and_cross_domain_project(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+ role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role2['id'], role2)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role2['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+
+ roles_ref_ids = []
+ for ref in roles_ref:
+ roles_ref_ids.append(ref['id'])
+ self.assertIn(role1['id'], roles_ref_ids)
+ self.assertIn(role2['id'], roles_ref_ids)
+
+ self.assignment_api.delete_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.assertDictEqual(roles_ref[0], role2)
+
+ def test_role_grant_by_user_and_cross_domain_project(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+ role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role2['id'], role2)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role2['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+
+ roles_ref_ids = []
+ for ref in roles_ref:
+ roles_ref_ids.append(ref['id'])
+ self.assertIn(role1['id'], roles_ref_ids)
+ self.assertIn(role2['id'], roles_ref_ids)
+
+ self.assignment_api.delete_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.assertDictEqual(roles_ref[0], role2)
+
+ def test_delete_user_grant_no_user(self):
+ # Can delete a grant where the user doesn't exist.
+ role_id = uuid.uuid4().hex
+ role = {'id': role_id, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_id, role)
+
+ user_id = uuid.uuid4().hex
+
+ self.assignment_api.create_grant(role_id, user_id=user_id,
+ project_id=self.tenant_bar['id'])
+
+ self.assignment_api.delete_grant(role_id, user_id=user_id,
+ project_id=self.tenant_bar['id'])
+
+ def test_delete_group_grant_no_group(self):
+ # Can delete a grant where the group doesn't exist.
+ role_id = uuid.uuid4().hex
+ role = {'id': role_id, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_id, role)
+
+ group_id = uuid.uuid4().hex
+
+ self.assignment_api.create_grant(role_id, group_id=group_id,
+ project_id=self.tenant_bar['id'])
+
+ self.assignment_api.delete_grant(role_id, group_id=group_id,
+ project_id=self.tenant_bar['id'])
+
+ def test_grant_crud_throws_exception_if_invalid_role(self):
+ """Ensure RoleNotFound thrown if role does not exist."""
+
+ def assert_role_not_found_exception(f, **kwargs):
+ self.assertRaises(exception.RoleNotFound, f,
+ role_id=uuid.uuid4().hex, **kwargs)
+
+ user = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user_resp = self.identity_api.create_user(user)
+ group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True}
+ group_resp = self.identity_api.create_group(group)
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ project_resp = self.resource_api.create_project(project['id'], project)
+
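+        # Exercise create_grant, get_grant and delete_grant against all four
+        # actor/target combinations (user/project, group/project, user/domain,
+        # group/domain); each must raise RoleNotFound for an unknown role.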
+ for manager_call in [self.assignment_api.create_grant,
+ self.assignment_api.get_grant,
+ self.assignment_api.delete_grant]:
+ assert_role_not_found_exception(
+ manager_call,
+ user_id=user_resp['id'], project_id=project_resp['id'])
+ assert_role_not_found_exception(
+ manager_call,
+ group_id=group_resp['id'], project_id=project_resp['id'])
+ assert_role_not_found_exception(
+ manager_call,
+ user_id=user_resp['id'], domain_id=DEFAULT_DOMAIN_ID)
+ assert_role_not_found_exception(
+ manager_call,
+ group_id=group_resp['id'], domain_id=DEFAULT_DOMAIN_ID)
+
+ def test_multi_role_grant_by_user_group_on_project_domain(self):
+ role_list = []
+ for _ in range(10):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+ group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group2 = self.identity_api.create_group(group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[3]['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[4]['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[5]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role_list[6]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role_list[7]['id'])
+ roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(2, len(roles_ref))
+ self.assertIn(role_list[0], roles_ref)
+ self.assertIn(role_list[1], roles_ref)
+ roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(2, len(roles_ref))
+ self.assertIn(role_list[2], roles_ref)
+ self.assertIn(role_list[3], roles_ref)
+ roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(2, len(roles_ref))
+ self.assertIn(role_list[4], roles_ref)
+ self.assertIn(role_list[5], roles_ref)
+ roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(2, len(roles_ref))
+ self.assertIn(role_list[6], roles_ref)
+ self.assertIn(role_list[7], roles_ref)
+
+ # Now test the alternate way of getting back lists of grants,
+ # where user and group roles are combined. These should match
+ # the above results.
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEqual(4, len(combined_list))
+ self.assertIn(role_list[4]['id'], combined_list)
+ self.assertIn(role_list[5]['id'], combined_list)
+ self.assertIn(role_list[6]['id'], combined_list)
+ self.assertIn(role_list[7]['id'], combined_list)
+
+ combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
+ user1['id'], domain1['id'])
+ self.assertEqual(4, len(combined_role_list))
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[1]['id'], combined_role_list)
+ self.assertIn(role_list[2]['id'], combined_role_list)
+ self.assertIn(role_list[3]['id'], combined_role_list)
+
+ def test_multi_group_grants_on_project_domain(self):
+ """Test multiple group roles for user on project and domain.
+
+ Test Plan:
+
+ - Create 6 roles
+ - Create a domain, with a project, user and two groups
+ - Make the user a member of both groups
+        - Check no roles yet exist
+        - Assign a role to the user and to both groups on both the
+          project and domain
+ - Get a list of effective roles for the user on both the
+ project and domain, checking we get back the correct three
+ roles
+
+ """
+ role_list = []
+ for _ in range(6):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+ group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group2 = self.identity_api.create_group(group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+ self.assignment_api.create_grant(group_id=group2['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[3]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role_list[4]['id'])
+ self.assignment_api.create_grant(group_id=group2['id'],
+ project_id=project1['id'],
+ role_id=role_list[5]['id'])
+
+        # Read back the roles, ensuring we get the correct 3 roles for
+        # both the project and the domain
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEqual(3, len(combined_list))
+ self.assertIn(role_list[3]['id'], combined_list)
+ self.assertIn(role_list[4]['id'], combined_list)
+ self.assertIn(role_list[5]['id'], combined_list)
+
+ combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
+ user1['id'], domain1['id'])
+ self.assertEqual(3, len(combined_role_list))
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[1]['id'], combined_role_list)
+ self.assertIn(role_list[2]['id'], combined_role_list)
+
+ def test_delete_role_with_user_and_group_grants(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.role_api.delete_role(role1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(0, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ def test_delete_user_with_group_project_domain_links(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.add_user_to_group(user_id=user1['id'],
+ group_id=group1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.identity_api.check_user_in_group(
+ user_id=user1['id'],
+ group_id=group1['id'])
+ self.identity_api.delete_user(user1['id'])
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ user1['id'],
+ group1['id'])
+
+ def test_delete_group_with_user_project_domain_links(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.add_user_to_group(user_id=user1['id'],
+ group_id=group1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.identity_api.check_user_in_group(
+ user_id=user1['id'],
+ group_id=group1['id'])
+ self.identity_api.delete_group(group1['id'])
+ self.identity_api.get_user(user1['id'])
+
+ def test_delete_domain_with_user_group_project_links(self):
+ # TODO(chungg):add test case once expected behaviour defined
+ pass
+
+ def test_add_user_to_project(self):
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = self.assignment_api.list_projects_for_user(
+ self.user_foo['id'])
+ self.assertIn(self.tenant_baz, tenants)
+
+ def test_add_user_to_project_missing_default_role(self):
+ self.role_api.delete_role(CONF.member_role_id)
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ CONF.member_role_id)
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = (
+ self.assignment_api.list_projects_for_user(self.user_foo['id']))
+ self.assertIn(self.tenant_baz, tenants)
+ default_role = self.role_api.get_role(CONF.member_role_id)
+ self.assertIsNotNone(default_role)
+
+ def test_add_user_to_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.add_user_to_project,
+ uuid.uuid4().hex,
+ self.user_foo['id'])
+
+ def test_add_user_to_project_no_user(self):
+        # If the user doesn't exist when add_user_to_project is called,
+        # no error is raised.
+ user_id_not_exist = uuid.uuid4().hex
+ self.assignment_api.add_user_to_project(self.tenant_bar['id'],
+ user_id_not_exist)
+
+ def test_remove_user_from_project(self):
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = self.assignment_api.list_projects_for_user(
+ self.user_foo['id'])
+ self.assertNotIn(self.tenant_baz, tenants)
+
+ def test_remove_user_from_project_race_delete_role(self):
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ tenant_id=self.tenant_baz['id'],
+ user_id=self.user_foo['id'],
+ role_id=self.role_other['id'])
+
+        # Mock a race condition: delete a role after
+ # get_roles_for_user_and_project() is called in
+ # remove_user_from_project().
+ roles = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_baz['id'])
+ self.role_api.delete_role(self.role_other['id'])
+ self.assignment_api.get_roles_for_user_and_project = mock.Mock(
+ return_value=roles)
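+        # The mocked manager now returns the stale role list, so
+        # remove_user_from_project() must tolerate a role that was deleted
+        # after it was read - the race this test simulates.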
+ self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = self.assignment_api.list_projects_for_user(
+ self.user_foo['id'])
+ self.assertNotIn(self.tenant_baz, tenants)
+
+ def test_remove_user_from_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.remove_user_from_project,
+ uuid.uuid4().hex,
+ self.user_foo['id'])
+
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.remove_user_from_project,
+ self.tenant_bar['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.NotFound,
+ self.assignment_api.remove_user_from_project,
+ self.tenant_baz['id'],
+ self.user_foo['id'])
+
+ def test_list_user_project_ids_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.list_projects_for_user,
+ uuid.uuid4().hex)
+
+ def test_update_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.update_project,
+ uuid.uuid4().hex,
+ dict())
+
+ def test_delete_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.delete_project,
+ uuid.uuid4().hex)
+
+ def test_update_user_404(self):
+ user_id = uuid.uuid4().hex
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.update_user,
+ user_id,
+ {'id': user_id,
+ 'domain_id': DEFAULT_DOMAIN_ID})
+
+ def test_delete_user_with_project_association(self):
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_bar['id'],
+ user['id'])
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.list_projects_for_user,
+ user['id'])
+
+ def test_delete_user_with_project_roles(self):
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ user = self.identity_api.create_user(user)
+ self.assignment_api.add_role_to_user_and_project(
+ user['id'],
+ self.tenant_bar['id'],
+ self.role_member['id'])
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.list_projects_for_user,
+ user['id'])
+
+ def test_delete_user_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.delete_user,
+ uuid.uuid4().hex)
+
+ def test_delete_role_404(self):
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.delete_role,
+ uuid.uuid4().hex)
+
+ def test_create_update_delete_unicode_project(self):
+ unicode_project_name = u'name \u540d\u5b57'
+ project = {'id': uuid.uuid4().hex,
+ 'name': unicode_project_name,
+ 'description': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id}
+ self.resource_api.create_project(project['id'], project)
+ self.resource_api.update_project(project['id'], project)
+ self.resource_api.delete_project(project['id'])
+
+ def test_create_project_with_no_enabled_field(self):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(ref['id'], ref)
+
+ project = self.resource_api.get_project(ref['id'])
+ self.assertIs(project['enabled'], True)
+
+ def test_create_project_long_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'a' * 65,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ tenant['id'],
+ tenant)
+
+ def test_create_project_blank_name_fails(self):
+ tenant = {'id': 'fake1', 'name': '',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ tenant['id'],
+ tenant)
+
+ def test_create_project_invalid_name_fails(self):
+ tenant = {'id': 'fake1', 'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ tenant['id'],
+ tenant)
+ tenant = {'id': 'fake1', 'name': 123,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ tenant['id'],
+ tenant)
+
+ def test_update_project_blank_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant['name'] = ''
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ tenant['id'],
+ tenant)
+
+ def test_update_project_long_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant['name'] = 'a' * 65
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ tenant['id'],
+ tenant)
+
+ def test_update_project_invalid_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant['name'] = None
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ tenant['id'],
+ tenant)
+
+ tenant['name'] = 123
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ tenant['id'],
+ tenant)
+
+ def test_create_user_long_name_fails(self):
+ user = {'name': 'a' * 256,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_user_blank_name_fails(self):
+ user = {'name': '',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_user_missed_password(self):
+ user = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ self.identity_api.get_user(user['id'])
+        # Make sure the user is not allowed to log in
+        # with a password that is an empty string or None
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password='')
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password=None)
+
+ def test_create_user_none_password(self):
+ user = {'name': 'fake1', 'password': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ self.identity_api.get_user(user['id'])
+        # Make sure the user is not allowed to log in
+        # with a password that is an empty string or None
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password='')
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password=None)
+
+ def test_create_user_invalid_name_fails(self):
+ user = {'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ user = {'name': 123,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_update_project_invalid_enabled_type_string(self):
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertEqual(True, project_ref['enabled'])
+
+ # Strings are not valid boolean values
+ project['enabled'] = "false"
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ def test_create_project_invalid_enabled_type_string(self):
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ # invalid string value
+ 'enabled': "true"}
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_user_invalid_enabled_type_string(self):
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex,
+ # invalid string value
+ 'enabled': "true"}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_update_user_long_name_fails(self):
+ user = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ user['name'] = 'a' * 256
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_update_user_blank_name_fails(self):
+ user = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ user['name'] = ''
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_update_user_invalid_name_fails(self):
+ user = {'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+
+ user['name'] = None
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ user['name'] = 123
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_list_users(self):
+ users = self.identity_api.list_users(
+ domain_scope=self._set_domain_scope(DEFAULT_DOMAIN_ID))
+ self.assertEqual(len(default_fixtures.USERS), len(users))
+ user_ids = set(user['id'] for user in users)
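+        # Each default fixture user is assumed to be attached to the test
+        # case as a self.user_<name> attribute carrying its backend-assigned
+        # ID, which is why the expected IDs are collected via getattr().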
+ expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
+ for user in default_fixtures.USERS)
+ for user_ref in users:
+ self.assertNotIn('password', user_ref)
+ self.assertEqual(expected_user_ids, user_ids)
+
+ def test_list_groups(self):
+ group1 = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex}
+ group2 = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex}
+ group1 = self.identity_api.create_group(group1)
+ group2 = self.identity_api.create_group(group2)
+ groups = self.identity_api.list_groups(
+ domain_scope=self._set_domain_scope(DEFAULT_DOMAIN_ID))
+ self.assertEqual(2, len(groups))
+ group_ids = []
+ for group in groups:
+ group_ids.append(group.get('id'))
+ self.assertIn(group1['id'], group_ids)
+ self.assertIn(group2['id'], group_ids)
+
+ def test_list_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ self.resource_api.create_domain(domain2['id'], domain2)
+ domains = self.resource_api.list_domains()
+ self.assertEqual(3, len(domains))
+ domain_ids = []
+ for domain in domains:
+ domain_ids.append(domain.get('id'))
+ self.assertIn(DEFAULT_DOMAIN_ID, domain_ids)
+ self.assertIn(domain1['id'], domain_ids)
+ self.assertIn(domain2['id'], domain_ids)
+
+ def test_list_projects(self):
+ projects = self.resource_api.list_projects()
+ self.assertEqual(4, len(projects))
+ project_ids = []
+ for project in projects:
+ project_ids.append(project.get('id'))
+ self.assertIn(self.tenant_bar['id'], project_ids)
+ self.assertIn(self.tenant_baz['id'], project_ids)
+
+ def test_list_projects_with_multiple_filters(self):
+ # Create a project
+ project = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex, 'description': uuid.uuid4().hex,
+ 'enabled': True, 'parent_id': None}
+ self.resource_api.create_project(project['id'], project)
+
+        # Build driver hints with the project's name and a nonexistent
+        # description
+ hints = driver_hints.Hints()
+ hints.add_filter('name', project['name'])
+ hints.add_filter('description', uuid.uuid4().hex)
+
+ # Retrieve projects based on hints and check an empty list is returned
+ projects = self.resource_api.list_projects(hints)
+ self.assertEqual([], projects)
+
+ # Build correct driver hints
+ hints = driver_hints.Hints()
+ hints.add_filter('name', project['name'])
+ hints.add_filter('description', project['description'])
+
+ # Retrieve projects based on hints
+ projects = self.resource_api.list_projects(hints)
+
+ # Check that the returned list contains only the first project
+ self.assertEqual(1, len(projects))
+ self.assertEqual(project, projects[0])
+
+ def test_list_projects_for_domain(self):
+ project_ids = ([x['id'] for x in
+ self.resource_api.list_projects_in_domain(
+ DEFAULT_DOMAIN_ID)])
+ self.assertEqual(4, len(project_ids))
+ self.assertIn(self.tenant_bar['id'], project_ids)
+ self.assertIn(self.tenant_baz['id'], project_ids)
+ self.assertIn(self.tenant_mtu['id'], project_ids)
+ self.assertIn(self.tenant_service['id'], project_ids)
+
+ @tests.skip_if_no_multiple_domains_support
+ def test_list_projects_for_alternate_domain(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project2['id'], project2)
+ project_ids = ([x['id'] for x in
+ self.resource_api.list_projects_in_domain(
+ domain1['id'])])
+ self.assertEqual(2, len(project_ids))
+ self.assertIn(project1['id'], project_ids)
+ self.assertIn(project2['id'], project_ids)
+
+ def _create_projects_hierarchy(self, hierarchy_size=2,
+ domain_id=DEFAULT_DOMAIN_ID):
+ """Creates a project hierarchy with specified size.
+
+ :param hierarchy_size: the desired hierarchy size, default is 2 -
+ a project with one child.
+ :param domain_id: domain where the projects hierarchy will be created.
+
+ :returns projects: a list of the projects in the created hierarchy.
+
+ """
+ project_id = uuid.uuid4().hex
+ project = {'id': project_id,
+ 'description': '',
+ 'domain_id': domain_id,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None}
+ self.resource_api.create_project(project_id, project)
+
+ projects = [project]
+ for i in range(1, hierarchy_size):
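+            # Chain each new project under the previous one, producing a
+            # single root-to-leaf path of length hierarchy_size.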
+ new_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': domain_id,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': project_id}
+ self.resource_api.create_project(new_project['id'], new_project)
+ projects.append(new_project)
+ project_id = new_project['id']
+
+ return projects
+
+ def test_check_leaf_projects(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ self.assertFalse(self.resource_api.is_leaf_project(
+ root_project['id']))
+ self.assertTrue(self.resource_api.is_leaf_project(
+ leaf_project['id']))
+
+ # Delete leaf_project
+ self.resource_api.delete_project(leaf_project['id'])
+
+ # Now, root_project should be leaf
+ self.assertTrue(self.resource_api.is_leaf_project(
+ root_project['id']))
+
+ def test_list_projects_in_subtree(self):
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ project1 = projects_hierarchy[0]
+ project2 = projects_hierarchy[1]
+ project3 = projects_hierarchy[2]
+ project4 = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': project2['id']}
+ self.resource_api.create_project(project4['id'], project4)
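+        # Resulting tree: project1 -> project2 -> {project3, project4}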
+
+ subtree = self.resource_api.list_projects_in_subtree(project1['id'])
+ self.assertEqual(3, len(subtree))
+ self.assertIn(project2, subtree)
+ self.assertIn(project3, subtree)
+ self.assertIn(project4, subtree)
+
+ subtree = self.resource_api.list_projects_in_subtree(project2['id'])
+ self.assertEqual(2, len(subtree))
+ self.assertIn(project3, subtree)
+ self.assertIn(project4, subtree)
+
+ subtree = self.resource_api.list_projects_in_subtree(project3['id'])
+ self.assertEqual(0, len(subtree))
+
+ def test_list_project_parents(self):
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ project1 = projects_hierarchy[0]
+ project2 = projects_hierarchy[1]
+ project3 = projects_hierarchy[2]
+ project4 = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': project2['id']}
+ self.resource_api.create_project(project4['id'], project4)
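+        # project3 and project4 are siblings under project2, so they must
+        # share the same parent chain.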
+
+ parents1 = self.resource_api.list_project_parents(project3['id'])
+ self.assertEqual(2, len(parents1))
+ self.assertIn(project1, parents1)
+ self.assertIn(project2, parents1)
+
+ parents2 = self.resource_api.list_project_parents(project4['id'])
+ self.assertEqual(parents1, parents2)
+
+ parents = self.resource_api.list_project_parents(project1['id'])
+ self.assertEqual(0, len(parents))
+
+ def test_delete_project_with_role_assignments(self):
+ tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(tenant['id'], tenant)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], tenant['id'], 'member')
+ self.resource_api.delete_project(tenant['id'])
+ self.assertRaises(exception.NotFound,
+ self.resource_api.get_project,
+ tenant['id'])
+
+ def test_delete_role_check_role_grant(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ alt_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ self.role_api.create_role(alt_role['id'], alt_role)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], role['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], alt_role['id'])
+ self.role_api.delete_role(role['id'])
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(role['id'], roles_ref)
+ self.assertIn(alt_role['id'], roles_ref)
+
+ def test_create_project_doesnt_modify_passed_in_dict(self):
+ new_project = {'id': 'tenant_id', 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ original_project = new_project.copy()
+ self.resource_api.create_project('tenant_id', new_project)
+ self.assertDictEqual(original_project, new_project)
+
+ def test_create_user_doesnt_modify_passed_in_dict(self):
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ original_user = new_user.copy()
+ self.identity_api.create_user(new_user)
+ self.assertDictEqual(original_user, new_user)
+
+ def test_update_user_enable(self):
+ user = {'name': 'fake1', 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(True, user_ref['enabled'])
+
+ user['enabled'] = False
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['enabled'], user_ref['enabled'])
+
+        # If not present, the enabled field should not be updated
+ del user['enabled']
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(False, user_ref['enabled'])
+
+ user['enabled'] = True
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['enabled'], user_ref['enabled'])
+
+ del user['enabled']
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(True, user_ref['enabled'])
+
+        # Integers are accepted as boolean values; test this explicitly.
+ user['enabled'] = 0
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(False, user_ref['enabled'])
+
+        # Any integer other than 0 is interpreted as True
+ user['enabled'] = -42
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(True, user_ref['enabled'])
+
+ def test_update_user_name(self):
+ user = {'name': uuid.uuid4().hex,
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['name'], user_ref['name'])
+
+ changed_name = user_ref['name'] + '_changed'
+ user_ref['name'] = changed_name
+ updated_user = self.identity_api.update_user(user_ref['id'], user_ref)
+
+ # NOTE(dstanek): the SQL backend adds an 'extra' field containing a
+ # dictionary of the extra fields in addition to the
+ # fields in the object. For the details see:
+ # SqlIdentity.test_update_project_returns_extra
+ updated_user.pop('extra', None)
+
+ self.assertDictEqual(user_ref, updated_user)
+
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertEqual(changed_name, user_ref['name'])
+
+ def test_update_user_enable_fails(self):
+ user = {'name': 'fake1', 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(True, user_ref['enabled'])
+
+ # Strings are not valid boolean values
+ user['enabled'] = "false"
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_update_project_enable(self):
+ tenant = {'id': 'fake1', 'name': 'fake1', 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project('fake1', tenant)
+ tenant_ref = self.resource_api.get_project('fake1')
+ self.assertEqual(True, tenant_ref['enabled'])
+
+ tenant['enabled'] = False
+ self.resource_api.update_project('fake1', tenant)
+ tenant_ref = self.resource_api.get_project('fake1')
+ self.assertEqual(tenant['enabled'], tenant_ref['enabled'])
+
+        # If not present, the enabled field should not be updated
+ del tenant['enabled']
+ self.resource_api.update_project('fake1', tenant)
+ tenant_ref = self.resource_api.get_project('fake1')
+ self.assertEqual(False, tenant_ref['enabled'])
+
+ tenant['enabled'] = True
+ self.resource_api.update_project('fake1', tenant)
+ tenant_ref = self.resource_api.get_project('fake1')
+ self.assertEqual(tenant['enabled'], tenant_ref['enabled'])
+
+ del tenant['enabled']
+ self.resource_api.update_project('fake1', tenant)
+ tenant_ref = self.resource_api.get_project('fake1')
+ self.assertEqual(True, tenant_ref['enabled'])
+
+ def test_add_user_to_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+
+        self.assertIn(new_group['id'], [x['id'] for x in groups])
+
+ def test_add_user_to_group_404(self):
+ domain = self._get_domain_fixture()
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.add_user_to_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.add_user_to_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.add_user_to_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_check_user_in_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
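+        # check_user_in_group() raises NotFound on failure, so a clean
+        # return confirms membership.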
+ self.identity_api.check_user_in_group(new_user['id'], new_group['id'])
+
+ def test_create_invalid_domain_fails(self):
+ new_group = {'domain_id': "doesnotexist", 'name': uuid.uuid4().hex}
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.create_group,
+ new_group)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': "doesnotexist"}
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.create_user,
+ new_user)
+
+ def test_check_user_not_in_group(self):
+ new_group = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID}
+ new_user = self.identity_api.create_user(new_user)
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ new_user['id'],
+ new_group['id'])
+
+ def test_check_user_in_group_404(self):
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID}
+ new_user = self.identity_api.create_user(new_user)
+
+ new_group = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.check_user_in_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.check_user_in_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_list_users_in_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ # Make sure we get an empty list back on a new group, not an error.
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+ self.assertEqual([], user_refs)
+ # Make sure we get the correct users back once they have been added
+ # to the group.
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+        found = False
+        for x in user_refs:
+            if x['id'] == new_user['id']:
+                found = True
+                self.assertNotIn('password', x)
+        self.assertTrue(found)
+
+ def test_list_users_in_group_404(self):
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.list_users_in_group,
+ uuid.uuid4().hex)
+
+ def test_list_groups_for_user(self):
+ domain = self._get_domain_fixture()
+ test_groups = []
+ test_users = []
+ GROUP_COUNT = 3
+ USER_COUNT = 2
+
+ for x in range(0, USER_COUNT):
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ test_users.append(new_user)
+ positive_user = test_users[0]
+ negative_user = test_users[1]
+
+ for x in range(0, USER_COUNT):
+ group_refs = self.identity_api.list_groups_for_user(
+ test_users[x]['id'])
+ self.assertEqual(0, len(group_refs))
+
+ for x in range(0, GROUP_COUNT):
+ before_count = x
+ after_count = x + 1
+ new_group = {'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ test_groups.append(new_group)
+
+            # add the user to the group and ensure that the
+            # group count increases by one each time
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(before_count, len(group_refs))
+ self.identity_api.add_user_to_group(
+ positive_user['id'],
+ new_group['id'])
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(after_count, len(group_refs))
+
+ # Make sure the group count for the unrelated user did not change
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+        # remove the user from each group and ensure that
+        # the group count decreases by one each time
+        for x in range(0, GROUP_COUNT):
+ before_count = GROUP_COUNT - x
+ after_count = GROUP_COUNT - x - 1
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(before_count, len(group_refs))
+ self.identity_api.remove_user_from_group(
+ positive_user['id'],
+ test_groups[x]['id'])
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(after_count, len(group_refs))
+ # Make sure the group count for the unrelated user
+ # did not change
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+ def test_remove_user_from_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertIn(new_group['id'], [x['id'] for x in groups])
+ self.identity_api.remove_user_from_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertNotIn(new_group['id'], [x['id'] for x in groups])
+
+ def test_remove_user_from_group_404(self):
+ domain = self._get_domain_fixture()
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.remove_user_from_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.remove_user_from_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_user_from_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_group_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictContainsSubset(group, group_ref)
+
+ group['name'] = uuid.uuid4().hex
+ self.identity_api.update_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictContainsSubset(group, group_ref)
+
+ self.identity_api.delete_group(group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ group['id'])
+
+ def test_get_group_by_name(self):
+ group_name = uuid.uuid4().hex
+ group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': group_name}
+ group = self.identity_api.create_group(group)
+ spoiler = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(spoiler)
+
+ group_ref = self.identity_api.get_group_by_name(
+ group_name, DEFAULT_DOMAIN_ID)
+ self.assertDictEqual(group_ref, group)
+
+ def test_get_group_by_name_404(self):
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group_by_name,
+ uuid.uuid4().hex,
+ DEFAULT_DOMAIN_ID)
+
+ @tests.skip_if_cache_disabled('identity')
+ def test_cache_layer_group_crud(self):
+ group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+ # cache the result
+ group_ref = self.identity_api.get_group(group['id'])
+ # delete the group bypassing identity api.
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(group['id']))
+ driver.delete_group(entity_id)
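+        # The deletion bypassed the manager, so the cached entry is still
+        # served until it is explicitly invalidated below.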
+
+ self.assertEqual(group_ref, self.identity_api.get_group(group['id']))
+ self.identity_api.get_group.invalidate(self.identity_api, group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group, group['id'])
+
+ group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+ # cache the result
+ self.identity_api.get_group(group['id'])
+ group['name'] = uuid.uuid4().hex
+ group_ref = self.identity_api.update_group(group['id'], group)
+        # after updating through the identity api, get the updated group
+ self.assertDictContainsSubset(self.identity_api.get_group(group['id']),
+ group_ref)
+
+ def test_create_duplicate_group_name_fails(self):
+ group1 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
+ group2 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': group1['name']}
+ group1 = self.identity_api.create_group(group1)
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_group,
+ group2)
+
+ def test_create_duplicate_group_name_in_different_domains(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ group1 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
+ group2 = {'domain_id': new_domain['id'], 'name': group1['name']}
+ group1 = self.identity_api.create_group(group1)
+ group2 = self.identity_api.create_group(group2)
+
+ def test_move_group_between_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ group = {'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ group = self.identity_api.create_group(group)
+ group['domain_id'] = domain2['id']
+ self.identity_api.update_group(group['id'], group)
+
+ def test_move_group_between_domains_with_clashing_names_fails(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ # First, create a group in domain1
+ group1 = {'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ group1 = self.identity_api.create_group(group1)
+ # Now create a group in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ group2 = {'name': group1['name'],
+ 'domain_id': domain2['id']}
+ group2 = self.identity_api.create_group(group2)
+        # Now try to move group1 into the 2nd domain - which should
+        # fail since the names clash
+ group1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_group,
+ group1['id'],
+ group1)
+
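+    # Requires multiple-domain support in the backend; presumably skipped
+    # for single-domain identity configurations (e.g. some LDAP setups).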
+ @tests.skip_if_no_multiple_domains_support
+ def test_project_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictContainsSubset(project, project_ref)
+
+ project['name'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictContainsSubset(project, project_ref)
+
+ self.resource_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ def test_domain_delete_hierarchy(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
+
+ # Creating a root and a leaf project inside the domain
+ projects_hierarchy = self._create_projects_hierarchy(
+ domain_id=domain['id'])
+ root_project = projects_hierarchy[0]
+        leaf_project = projects_hierarchy[1]
+
+ # Disable the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+
+ # Delete the domain
+ self.resource_api.delete_domain(domain['id'])
+
+ # Make sure the domain no longer exists
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ # Make sure the root project no longer exists
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project['id'])
+
+ # Make sure the leaf project no longer exists
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ leaf_project['id'])
+
+ def test_hierarchical_projects_crud(self):
+        # create a hierarchy with just a root project (which is also a leaf)
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=1)
+ root_project1 = projects_hierarchy[0]
+
+ # create a hierarchy with one root project and one leaf project
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project2 = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+        # update the description of leaf_project
+ leaf_project['description'] = 'new description'
+ self.resource_api.update_project(leaf_project['id'], leaf_project)
+ proj_ref = self.resource_api.get_project(leaf_project['id'])
+ self.assertDictEqual(proj_ref, leaf_project)
+
+        # updating the parent_id is not allowed
+ leaf_project['parent_id'] = root_project1['id']
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.update_project,
+ leaf_project['id'],
+ leaf_project)
+
+ # delete root_project1
+ self.resource_api.delete_project(root_project1['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project1['id'])
+
+        # deleting root_project2 is not allowed since it is not a leaf project
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.delete_project,
+ root_project2['id'])
+
+ def test_create_project_with_invalid_parent(self):
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'parent_id': 'fake'}
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_leaf_project_with_invalid_domain(self):
+ root_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'parent_id': None}
+ self.resource_api.create_project(root_project['id'], root_project)
+
+ leaf_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': 'fake',
+ 'enabled': True,
+ 'parent_id': root_project['id']}
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.create_project,
+ leaf_project['id'],
+ leaf_project)
+
+ def test_delete_hierarchical_leaf_project(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ self.resource_api.delete_project(leaf_project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ leaf_project['id'])
+
+ self.resource_api.delete_project(root_project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project['id'])
+
+ def test_delete_hierarchical_not_leaf_project(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.delete_project,
+ root_project['id'])
+
+ def test_update_project_parent(self):
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ project1 = projects_hierarchy[0]
+ project2 = projects_hierarchy[1]
+ project3 = projects_hierarchy[2]
+
+        # project2 is the parent of project3
+        self.assertEqual(project2['id'], project3.get('parent_id'))
+
+        # try to update project3's parent to project1
+ project3['parent_id'] = project1['id']
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.update_project,
+ project3['id'],
+ project3)
+
+ def test_create_project_under_disabled_one(self):
+ project1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': False,
+ 'parent_id': None}
+ self.resource_api.create_project(project1['id'], project1)
+
+ project2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'parent_id': project1['id']}
+
+ # It's not possible to create a project under a disabled one in the
+ # hierarchy
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.create_project,
+ project2['id'],
+ project2)
+
+ def test_disable_hierarchical_leaf_project(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ leaf_project = projects_hierarchy[1]
+
+ leaf_project['enabled'] = False
+ self.resource_api.update_project(leaf_project['id'], leaf_project)
+
+ project_ref = self.resource_api.get_project(leaf_project['id'])
+ self.assertEqual(project_ref['enabled'], leaf_project['enabled'])
+
+ def test_disable_hierarchical_not_leaf_project(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+
+ root_project['enabled'] = False
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.update_project,
+ root_project['id'],
+ root_project)
+
+ def test_enable_project_with_disabled_parent(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ # Disable leaf and root
+ leaf_project['enabled'] = False
+ self.resource_api.update_project(leaf_project['id'], leaf_project)
+ root_project['enabled'] = False
+ self.resource_api.update_project(root_project['id'], root_project)
+
+ # Trying to enable the leaf project is not allowed since its
+ # parent is disabled
+ leaf_project['enabled'] = True
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.update_project,
+ leaf_project['id'],
+ leaf_project)
+
+ def _get_hierarchy_depth(self, project_id):
+ return len(self.resource_api.list_project_parents(project_id)) + 1
+
+ def test_check_hierarchy_depth(self):
+ # First create a hierarchy with the max allowed depth
+ projects_hierarchy = self._create_projects_hierarchy(
+ CONF.max_project_tree_depth)
+ leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1]
+
+ depth = self._get_hierarchy_depth(leaf_project['id'])
+ self.assertEqual(CONF.max_project_tree_depth, depth)
+
+ # Creating another project in the hierarchy shouldn't be allowed
+ project_id = uuid.uuid4().hex
+ project = {
+ 'id': project_id,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'parent_id': leaf_project['id']}
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.create_project,
+ project_id,
+ project)
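+
+ # NOTE: a minimal sketch (an assumption, not code from this change) of
+ # the depth guard the test above exercises; it relies only on
+ # list_project_parents(), exactly as _get_hierarchy_depth does.
+ def _would_exceed_max_depth(self, parent_id):
+     # A child of parent_id sits one level below its parent in the tree.
+     child_depth = self._get_hierarchy_depth(parent_id) + 1
+     return child_depth > CONF.max_project_tree_depth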
+
+ def test_project_update_missing_attrs_with_a_value(self):
+ # Creating a project with no description attribute.
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'parent_id': None}
+ self.resource_api.create_project(project['id'], project)
+
+ # Add a description attribute.
+ project['description'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ def test_project_update_missing_attrs_with_a_falsey_value(self):
+ # Creating a project with no description attribute.
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'parent_id': None}
+ self.resource_api.create_project(project['id'], project)
+
+ # Add a description attribute.
+ project['description'] = ''
+ self.resource_api.update_project(project['id'], project)
+
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ def test_domain_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
+ domain_ref = self.resource_api.get_domain(domain['id'])
+ self.assertDictEqual(domain_ref, domain)
+
+ domain['name'] = uuid.uuid4().hex
+ self.resource_api.update_domain(domain['id'], domain)
+ domain_ref = self.resource_api.get_domain(domain['id'])
+ self.assertDictEqual(domain_ref, domain)
+
+ # Ensure an 'enabled' domain cannot be deleted
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.delete_domain,
+ domain_id=domain['id'])
+
+ # Disable the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+
+ # Delete the domain
+ self.resource_api.delete_domain(domain['id'])
+
+ # Make sure the domain no longer exists
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ @tests.skip_if_no_multiple_domains_support
+ def test_create_domain_case_sensitivity(self):
+ # create a ref with a lowercase name
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex.lower()}
+ self.resource_api.create_domain(ref['id'], ref)
+
+ # give the ref a new ID and the same name, but this time in uppercase
+ ref['id'] = uuid.uuid4().hex
+ ref['name'] = ref['name'].upper()
+ self.resource_api.create_domain(ref['id'], ref)
+
+ def test_attribute_update(self):
+ project = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.resource_api.create_project(project['id'], project)
+
+ # pick a key known to be non-existent
+ key = 'description'
+
+ def assert_key_equals(value):
+ project_ref = self.resource_api.update_project(
+ project['id'], project)
+ self.assertEqual(value, project_ref[key])
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertEqual(value, project_ref[key])
+
+ def assert_get_key_is(value):
+ project_ref = self.resource_api.update_project(
+ project['id'], project)
+ self.assertIs(project_ref.get(key), value)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertIs(project_ref.get(key), value)
+
+ # add an attribute that doesn't exist, set it to a falsey value
+ value = ''
+ project[key] = value
+ assert_key_equals(value)
+
+ # set an attribute with a falsey value to null
+ value = None
+ project[key] = value
+ assert_get_key_is(value)
+
+ # do it again, in case updating from this situation is handled oddly
+ value = None
+ project[key] = value
+ assert_get_key_is(value)
+
+ # set a possibly-null value to a falsey value
+ value = ''
+ project[key] = value
+ assert_key_equals(value)
+
+ # set a falsey value to a truthy value
+ value = uuid.uuid4().hex
+ project[key] = value
+ assert_key_equals(value)
+
+ def test_user_crud(self):
+ user_dict = {'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex, 'password': 'passw0rd'}
+ user = self.identity_api.create_user(user_dict)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user_dict['password']
+ user_ref_dict = {x: user_ref[x] for x in user_ref}
+ self.assertDictContainsSubset(user_dict, user_ref_dict)
+
+ user_dict['password'] = uuid.uuid4().hex
+ self.identity_api.update_user(user['id'], user_dict)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user_dict['password']
+ user_ref_dict = {x: user_ref[x] for x in user_ref}
+ self.assertDictContainsSubset(user_dict, user_ref_dict)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+
+ def test_list_projects_for_user(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertEqual(0, len(user_projects))
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_baz['id'],
+ role_id=self.role_member['id'])
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertEqual(2, len(user_projects))
+
+ def test_list_projects_for_user_with_grants(self):
+ # Create two groups each with a role on a different project, and
+ # make user1 a member of both groups. Both these new projects
+ # should now be included, along with any direct user grants.
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = self.identity_api.create_group(group1)
+ group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group2 = self.identity_api.create_group(group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project2['id'], project2)
+ self.identity_api.add_user_to_group(user1['id'], group1['id'])
+ self.identity_api.add_user_to_group(user1['id'], group2['id'])
+
+ # Create 3 grants, one user grant, the other two as group grants
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=self.role_admin['id'])
+ self.assignment_api.create_grant(group_id=group2['id'],
+ project_id=project2['id'],
+ role_id=self.role_admin['id'])
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertEqual(3, len(user_projects))
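+
+ # NOTE: a pure-Python sketch (an assumption, not assignment-backend
+ # code from this change) of the union the test above checks: a user's
+ # projects come from direct user grants plus grants made to any group
+ # the user belongs to.
+ @staticmethod
+ def _effective_project_ids(user_grants, group_grants, user_group_ids):
+     ids = set(g['project_id'] for g in user_grants)
+     ids |= set(g['project_id'] for g in group_grants
+                if g['group_id'] in user_group_ids)
+     return ids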
+
+ @tests.skip_if_cache_disabled('resource')
+ @tests.skip_if_no_multiple_domains_support
+ def test_domain_rename_invalidates_get_domain_by_name_cache(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ domain_id = domain['id']
+ domain_name = domain['name']
+ self.resource_api.create_domain(domain_id, domain)
+ domain_ref = self.resource_api.get_domain_by_name(domain_name)
+ domain_ref['name'] = uuid.uuid4().hex
+ self.resource_api.update_domain(domain_id, domain_ref)
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain_by_name,
+ domain_name)
+
+ @tests.skip_if_cache_disabled('resource')
+ def test_cache_layer_domain_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ domain_id = domain['id']
+ # Create Domain
+ self.resource_api.create_domain(domain_id, domain)
+ domain_ref = self.resource_api.get_domain(domain_id)
+ updated_domain_ref = copy.deepcopy(domain_ref)
+ updated_domain_ref['name'] = uuid.uuid4().hex
+ # Update domain, bypassing resource api manager
+ self.resource_api.driver.update_domain(domain_id, updated_domain_ref)
+ # Verify get_domain still returns the domain
+ self.assertDictContainsSubset(
+ domain_ref, self.resource_api.get_domain(domain_id))
+ # Invalidate cache
+ self.resource_api.get_domain.invalidate(self.resource_api,
+ domain_id)
+ # Verify get_domain returns the updated domain
+ self.assertDictContainsSubset(
+ updated_domain_ref, self.resource_api.get_domain(domain_id))
+ # Update the domain back to the original ref, using the resource api
+ # manager
+ self.resource_api.update_domain(domain_id, domain_ref)
+ self.assertDictContainsSubset(
+ domain_ref, self.resource_api.get_domain(domain_id))
+ # Make sure the domain is 'disabled', bypassing the resource api manager
+ domain_ref_disabled = domain_ref.copy()
+ domain_ref_disabled['enabled'] = False
+ self.resource_api.driver.update_domain(domain_id,
+ domain_ref_disabled)
+ # Delete domain, bypassing resource api manager
+ self.resource_api.driver.delete_domain(domain_id)
+ # Verify get_domain still returns the domain
+ self.assertDictContainsSubset(
+ domain_ref, self.resource_api.get_domain(domain_id))
+ # Invalidate cache
+ self.resource_api.get_domain.invalidate(self.resource_api,
+ domain_id)
+ # Verify get_domain now raises DomainNotFound
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain, domain_id)
+ # Recreate Domain
+ self.resource_api.create_domain(domain_id, domain)
+ self.resource_api.get_domain(domain_id)
+ # Make sure the domain is 'disabled', bypassing the resource api manager
+ domain['enabled'] = False
+ self.resource_api.driver.update_domain(domain_id, domain)
+ # Delete domain
+ self.resource_api.delete_domain(domain_id)
+ # verify DomainNotFound raised
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain_id)
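+
+ # NOTE: a self-contained illustration (an assumption, not keystone
+ # code) of the read-through cache pattern exercised above: writes that
+ # bypass the manager leave the memoized value stale until it is
+ # explicitly invalidated.
+ @staticmethod
+ def _cached_get(cache, key, loader):
+     # Deleting cache[key] plays the role of get_domain.invalidate().
+     if key not in cache:
+         cache[key] = loader(key)
+     return cache[key]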
+
+ @tests.skip_if_cache_disabled('resource')
+ @tests.skip_if_no_multiple_domains_support
+ def test_project_rename_invalidates_get_project_by_name_cache(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ project_id = project['id']
+ project_name = project['name']
+ self.resource_api.create_domain(domain['id'], domain)
+ # Create a project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project_by_name(project_name, domain['id'])
+ project['name'] = uuid.uuid4().hex
+ self.resource_api.update_project(project_id, project)
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project_by_name,
+ project_name,
+ domain['id'])
+
+ @tests.skip_if_cache_disabled('resource')
+ @tests.skip_if_no_multiple_domains_support
+ def test_cache_layer_project_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ project_id = project['id']
+ self.resource_api.create_domain(domain['id'], domain)
+ # Create a project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project(project_id)
+ updated_project = copy.deepcopy(project)
+ updated_project['name'] = uuid.uuid4().hex
+ # Update project, bypassing resource manager
+ self.resource_api.driver.update_project(project_id,
+ updated_project)
+ # Verify get_project still returns the original project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Invalidate cache
+ self.resource_api.get_project.invalidate(self.resource_api,
+ project_id)
+ # Verify get_project now returns the new project
+ self.assertDictContainsSubset(
+ updated_project,
+ self.resource_api.get_project(project_id))
+ # Update the project back to the original ref, using the resource_api manager
+ self.resource_api.update_project(project['id'], project)
+ # Verify get_project returns the original project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Delete the project, bypassing the resource api manager
+ self.resource_api.driver.delete_project(project_id)
+ # Verify get_project still returns the project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Invalidate cache
+ self.resource_api.get_project.invalidate(self.resource_api,
+ project_id)
+ # Verify ProjectNotFound now raised
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project_id)
+ # recreate project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project(project_id)
+ # delete project
+ self.resource_api.delete_project(project_id)
+ # Verify ProjectNotFound is raised
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project_id)
+
+ def create_user_dict(self, **attributes):
+ user_dict = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True}
+ user_dict.update(attributes)
+ return user_dict
+
+ def test_arbitrary_attributes_are_returned_from_create_user(self):
+ attr_value = uuid.uuid4().hex
+ user_data = self.create_user_dict(arbitrary_attr=attr_value)
+
+ user = self.identity_api.create_user(user_data)
+
+ self.assertEqual(attr_value, user['arbitrary_attr'])
+
+ def test_arbitrary_attributes_are_returned_from_get_user(self):
+ attr_value = uuid.uuid4().hex
+ user_data = self.create_user_dict(arbitrary_attr=attr_value)
+
+ user_data = self.identity_api.create_user(user_data)
+
+ user = self.identity_api.get_user(user_data['id'])
+ self.assertEqual(attr_value, user['arbitrary_attr'])
+
+ def test_new_arbitrary_attributes_are_returned_from_update_user(self):
+ user_data = self.create_user_dict()
+
+ user = self.identity_api.create_user(user_data)
+ attr_value = uuid.uuid4().hex
+ user['arbitrary_attr'] = attr_value
+ updated_user = self.identity_api.update_user(user['id'], user)
+
+ self.assertEqual(attr_value, updated_user['arbitrary_attr'])
+
+ def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
+ attr_value = uuid.uuid4().hex
+ user_data = self.create_user_dict(arbitrary_attr=attr_value)
+
+ new_attr_value = uuid.uuid4().hex
+ user = self.identity_api.create_user(user_data)
+ user['arbitrary_attr'] = new_attr_value
+ updated_user = self.identity_api.update_user(user['id'], user)
+
+ self.assertEqual(new_attr_value, updated_user['arbitrary_attr'])
+
+ def test_create_grant_no_user(self):
+ # Calling create_grant with a user that doesn't exist should not fail.
+ self.assignment_api.create_grant(
+ self.role_other['id'],
+ user_id=uuid.uuid4().hex,
+ project_id=self.tenant_bar['id'])
+
+ def test_create_grant_no_group(self):
+ # Calling create_grant with a group that doesn't exist should not fail.
+ self.assignment_api.create_grant(
+ self.role_other['id'],
+ group_id=uuid.uuid4().hex,
+ project_id=self.tenant_bar['id'])
+
+ @tests.skip_if_no_multiple_domains_support
+ def test_get_default_domain_by_name(self):
+ domain_name = 'default'
+
+ domain = {'id': uuid.uuid4().hex, 'name': domain_name, 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
+
+ domain_ref = self.resource_api.get_domain_by_name(domain_name)
+ self.assertEqual(domain, domain_ref)
+
+ def test_get_not_default_domain_by_name(self):
+ domain_name = 'foo'
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain_by_name,
+ domain_name)
+
+ def test_project_update_and_project_get_return_same_response(self):
+ project = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+ self.resource_api.create_project(project['id'], project)
+
+ updated_project = {'enabled': False}
+ updated_project_ref = self.resource_api.update_project(
+ project['id'], updated_project)
+
+ # SQL backend adds 'extra' field
+ updated_project_ref.pop('extra', None)
+
+ self.assertIs(False, updated_project_ref['enabled'])
+
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, updated_project_ref)
+
+ def test_user_update_and_user_get_return_same_response(self):
+ user = {
+ 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+ user = self.identity_api.create_user(user)
+
+ updated_user = {'enabled': False}
+ updated_user_ref = self.identity_api.update_user(
+ user['id'], updated_user)
+
+ # SQL backend adds 'extra' field
+ updated_user_ref.pop('extra', None)
+
+ self.assertIs(False, updated_user_ref['enabled'])
+
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertDictEqual(user_ref, updated_user_ref)
+
+ def test_delete_group_removes_role_assignments(self):
+ # When a group is deleted any role assignments for the group are
+ # removed.
+
+ MEMBER_ROLE_ID = 'member'
+
+ def get_member_assignments():
+ assignments = self.assignment_api.list_role_assignments()
+ return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID]
+
+ orig_member_assignments = get_member_assignments()
+
+ # Create a group.
+ new_group = {
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': self.getUniqueString(prefix='tdgrra')}
+ new_group = self.identity_api.create_group(new_group)
+
+ # Create a project.
+ new_project = {
+ 'id': uuid.uuid4().hex,
+ 'name': self.getUniqueString(prefix='tdgrra'),
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ # Assign a role to the group.
+ self.assignment_api.create_grant(
+ group_id=new_group['id'], project_id=new_project['id'],
+ role_id=MEMBER_ROLE_ID)
+
+ # Delete the group.
+ self.identity_api.delete_group(new_group['id'])
+
+ # Check that the role assignment for the group is gone
+ member_assignments = get_member_assignments()
+
+ self.assertThat(member_assignments,
+ matchers.Equals(orig_member_assignments))
+
+ def test_get_roles_for_groups_on_domain(self):
+ """Test retrieving group domain roles.
+
+ Test Plan:
+
+ - Create a domain, three groups and three roles
+ - Assign one an inherited and the others a non-inherited group role
+ to the domain
+ - Ensure that only the non-inherited roles are returned on the domain
+
+ """
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ group_list = []
+ group_id_list = []
+ role_list = []
+ for _ in range(3):
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
+ group = self.identity_api.create_group(group)
+ group_list.append(group)
+ group_id_list.append(group['id'])
+
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ # Assign the roles - one is inherited
+ self.assignment_api.create_grant(group_id=group_list[0]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(group_id=group_list[1]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+ self.assignment_api.create_grant(group_id=group_list[2]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'],
+ inherited_to_projects=True)
+
+ # Now get the effective roles for the groups on the domain. We
+ # shouldn't get back the inherited role.
+
+ role_refs = self.assignment_api.get_roles_for_groups(
+ group_id_list, domain_id=domain1['id'])
+
+ self.assertThat(role_refs, matchers.HasLength(2))
+ self.assertIn(role_list[0], role_refs)
+ self.assertIn(role_list[1], role_refs)
+
+ def test_get_roles_for_groups_on_project(self):
+ """Test retrieving group project roles.
+
+ Test Plan:
+
+ - Create two domains, two projects, six groups and six roles
+ - Project1 is in Domain1, Project2 is in Domain2
+ - Domain2/Project2 are spoilers
+ - Assign a different direct group role to each project as well
+ as both an inherited and non-inherited role to each domain
+ - Get the group roles for Project 1 - depending on whether we have
+ enabled inheritance, we should either get back just the direct role
+ or both the direct one plus the inherited domain role from Domain 1
+
+ """
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.resource_api.create_project(project2['id'], project2)
+ group_list = []
+ group_id_list = []
+ role_list = []
+ for _ in range(6):
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
+ group = self.identity_api.create_group(group)
+ group_list.append(group)
+ group_id_list.append(group['id'])
+
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ # Assign the roles - one inherited and one non-inherited on Domain1,
+ # plus one on Project1
+ self.assignment_api.create_grant(group_id=group_list[0]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(group_id=group_list[1]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group_list[2]['id'],
+ project_id=project1['id'],
+ role_id=role_list[2]['id'])
+
+ # ...and a duplicate set of spoiler assignments to Domain2/Project2
+ self.assignment_api.create_grant(group_id=group_list[3]['id'],
+ domain_id=domain2['id'],
+ role_id=role_list[3]['id'])
+ self.assignment_api.create_grant(group_id=group_list[4]['id'],
+ domain_id=domain2['id'],
+ role_id=role_list[4]['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group_list[5]['id'],
+ project_id=project2['id'],
+ role_id=role_list[5]['id'])
+
+ # Now get the effective roles for all groups on Project1. With
+ # inheritance off, we should only get back the direct role.
+
+ self.config_fixture.config(group='os_inherit', enabled=False)
+ role_refs = self.assignment_api.get_roles_for_groups(
+ group_id_list, project_id=project1['id'])
+
+ self.assertThat(role_refs, matchers.HasLength(1))
+ self.assertIn(role_list[2], role_refs)
+
+ # With inheritance on, we should also get back the inherited role from
+ # its owning domain.
+
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ role_refs = self.assignment_api.get_roles_for_groups(
+ group_id_list, project_id=project1['id'])
+
+ self.assertThat(role_refs, matchers.HasLength(2))
+ self.assertIn(role_list[1], role_refs)
+ self.assertIn(role_list[2], role_refs)
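+
+ # NOTE: a pure-Python sketch (an assumption, not keystone code from
+ # this change) of the os_inherit toggle exercised above: inherited
+ # domain grants only apply to a project when inheritance is enabled.
+ @staticmethod
+ def _roles_on_project(direct_roles, inherited_domain_roles, os_inherit):
+     roles = list(direct_roles)
+     if os_inherit:
+         roles.extend(inherited_domain_roles)
+     return roles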
+
+ def test_list_domains_for_groups(self):
+ """Test retrieving domains for a list of groups.
+
+ Test Plan:
+
+ - Create three domains, three groups and one role
+ - Assign a non-inherited group role to two domains, and an inherited
+ group role to the third
+ - Ensure only the domains with non-inherited roles are returned
+
+ """
+ domain_list = []
+ group_list = []
+ group_id_list = []
+ for _ in range(3):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ domain_list.append(domain)
+
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group = self.identity_api.create_group(group)
+ group_list.append(group)
+ group_id_list.append(group['id'])
+
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+
+ # Assign the roles - one is inherited
+ self.assignment_api.create_grant(group_id=group_list[0]['id'],
+ domain_id=domain_list[0]['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group_list[1]['id'],
+ domain_id=domain_list[1]['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group_list[2]['id'],
+ domain_id=domain_list[2]['id'],
+ role_id=role1['id'],
+ inherited_to_projects=True)
+
+ # Now list the domains that have roles for any of the 3 groups
+ # We shouldn't get back domain[2] since that had an inherited role.
+
+ domain_refs = (
+ self.assignment_api.list_domains_for_groups(group_id_list))
+
+ self.assertThat(domain_refs, matchers.HasLength(2))
+ self.assertIn(domain_list[0], domain_refs)
+ self.assertIn(domain_list[1], domain_refs)
+
+ def test_list_projects_for_groups(self):
+ """Test retrieving projects for a list of groups.
+
+ Test Plan:
+
+ - Create two domains, four projects, seven groups and seven roles
+ - Project1-3 are in Domain1, Project4 is in Domain2
+ - Domain2/Project4 are spoilers
+ - Project1 and 2 have direct group roles, Project3 has no direct
+ roles but should inherit a group role from Domain1
+ - Get the projects for the group roles that are assigned to Project1,
+ Project2 and the inherited one on Domain1. Depending on whether we
+ have enabled inheritance, we should either get back just the projects
+ with direct roles (Project 1 and 2) or also Project3 due to its
+ inherited role from Domain1.
+
+ """
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ project1 = self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ project2 = self.resource_api.create_project(project2['id'], project2)
+ project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ project3 = self.resource_api.create_project(project3['id'], project3)
+ project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ project4 = self.resource_api.create_project(project4['id'], project4)
+ group_list = []
+ role_list = []
+ for _ in range(7):
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
+ group = self.identity_api.create_group(group)
+ group_list.append(group)
+
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ # Assign the roles - one inherited and one non-inherited on Domain1,
+ # plus one on Project1 and Project2
+ self.assignment_api.create_grant(group_id=group_list[0]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(group_id=group_list[1]['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group_list[2]['id'],
+ project_id=project1['id'],
+ role_id=role_list[2]['id'])
+ self.assignment_api.create_grant(group_id=group_list[3]['id'],
+ project_id=project2['id'],
+ role_id=role_list[3]['id'])
+
+ # ...and a few spoiler assignments to Domain2/Project4
+ self.assignment_api.create_grant(group_id=group_list[4]['id'],
+ domain_id=domain2['id'],
+ role_id=role_list[4]['id'])
+ self.assignment_api.create_grant(group_id=group_list[5]['id'],
+ domain_id=domain2['id'],
+ role_id=role_list[5]['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group_list[6]['id'],
+ project_id=project4['id'],
+ role_id=role_list[6]['id'])
+
+ # Now get the projects for the groups that have roles on Project1,
+ # Project2 and the inherited role on Domain1. With inheritance off,
+ # we should only get back the projects with direct roles.
+
+ self.config_fixture.config(group='os_inherit', enabled=False)
+ group_id_list = [group_list[1]['id'], group_list[2]['id'],
+ group_list[3]['id']]
+ project_refs = (
+ self.assignment_api.list_projects_for_groups(group_id_list))
+
+ self.assertThat(project_refs, matchers.HasLength(2))
+ self.assertIn(project1, project_refs)
+ self.assertIn(project2, project_refs)
+
+ # With inheritance on, we should also get back the Project3 due to the
+ # inherited role from its owning domain.
+
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ project_refs = (
+ self.assignment_api.list_projects_for_groups(group_id_list))
+
+ self.assertThat(project_refs, matchers.HasLength(3))
+ self.assertIn(project1, project_refs)
+ self.assertIn(project2, project_refs)
+ self.assertIn(project3, project_refs)
+
+ def test_update_role_no_name(self):
+ # A user can update a role and not include the name.
+
+ # 'description' is picked simply because it is not 'name'.
+ self.role_api.update_role(self.role_member['id'],
+ {'description': uuid.uuid4().hex})
+ # If the previous line didn't raise an exception then the test passes.
+
+ def test_update_role_same_name(self):
+ # A user can update a role and set the name to be the same as it was.
+
+ self.role_api.update_role(self.role_member['id'],
+ {'name': self.role_member['name']})
+ # If the previous line didn't raise an exception then the test passes.
+
+
+class TokenTests(object):
+ def _create_token_id(self):
+ # Use a token signed by the cms module
+ token_id = ""
+ for _ in range(1, 20):
+ token_id += uuid.uuid4().hex
+ return cms.cms_sign_token(token_id,
+ CONF.signing.certfile,
+ CONF.signing.keyfile)
+
+ def _assert_revoked_token_list_matches_token_persistence(
+ self, revoked_token_id_list):
+ # Assert that the list passed in matches the list returned by the
+ # token persistence service
+ persistence_list = [
+ x['id']
+ for x in self.token_provider_api.list_revoked_tokens()
+ ]
+ self.assertEqual(persistence_list, revoked_token_id_list)
+
+ def test_token_crud(self):
+ token_id = self._create_token_id()
+ data = {'id': token_id, 'a': 'b',
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ expires = data_ref.pop('expires')
+ data_ref.pop('user_id')
+ self.assertIsInstance(expires, datetime.datetime)
+ data_ref.pop('id')
+ data.pop('id')
+ self.assertDictEqual(data_ref, data)
+
+ new_data_ref = self.token_provider_api._persistence.get_token(token_id)
+ expires = new_data_ref.pop('expires')
+ self.assertIsInstance(expires, datetime.datetime)
+ new_data_ref.pop('user_id')
+ new_data_ref.pop('id')
+
+ self.assertEqual(data, new_data_ref)
+
+ self.token_provider_api._persistence.delete_token(token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token, token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.delete_token, token_id)
+
+ def create_token_sample_data(self, token_id=None, tenant_id=None,
+ trust_id=None, user_id=None, expires=None):
+ if token_id is None:
+ token_id = self._create_token_id()
+ if user_id is None:
+ user_id = 'testuserid'
+ # FIXME(morganfainberg): These tokens look nothing like "Real" tokens.
+ # This should be fixed when token issuance is cleaned up.
+ data = {'id': token_id, 'a': 'b',
+ 'user': {'id': user_id}}
+ if tenant_id is not None:
+ data['tenant'] = {'id': tenant_id, 'name': tenant_id}
+ if tenant_id is NULL_OBJECT:
+ data['tenant'] = None
+ if expires is not None:
+ data['expires'] = expires
+ if trust_id is not None:
+ data['trust_id'] = trust_id
+ data.setdefault('access', {}).setdefault('trust', {})
+ # 'testuserid2' is used here since the trustee is a different user
+ # in impersonation cases and therefore should not match the
+ # token's user_id.
+ data['access']['trust']['trustee_user_id'] = 'testuserid2'
+ data['token_version'] = provider.V2
+ # Issue token stores a copy of all token data at token['token_data'].
+ # This emulates that assumption as part of the test.
+ data['token_data'] = copy.deepcopy(data)
+ new_token = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ return new_token['id'], data
+
+ def test_delete_tokens(self):
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(0, len(tokens))
+ token_id1, data = self.create_token_sample_data(
+ tenant_id='testtenantid')
+ token_id2, data = self.create_token_sample_data(
+ tenant_id='testtenantid')
+ token_id3, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ user_id='testuserid1')
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(2, len(tokens))
+ self.assertIn(token_id2, tokens)
+ self.assertIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_tokens(
+ user_id='testuserid',
+ tenant_id='testtenantid')
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(0, len(tokens))
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id1)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id2)
+
+ self.token_provider_api._persistence.get_token(token_id3)
+
+ def test_delete_tokens_trust(self):
+ tokens = self.token_provider_api._persistence._list_tokens(
+ user_id='testuserid')
+ self.assertEqual(0, len(tokens))
+ token_id1, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ trust_id='testtrustid')
+ token_id2, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ user_id='testuserid1',
+ trust_id='testtrustid1')
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_tokens(
+ user_id='testuserid',
+ tenant_id='testtenantid',
+ trust_id='testtrustid')
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id1)
+ self.token_provider_api._persistence.get_token(token_id2)
+
+ def _test_token_list(self, token_list_fn):
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(0, len(tokens))
+ token_id1, data = self.create_token_sample_data()
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id1, tokens)
+ token_id2, data = self.create_token_sample_data()
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(2, len(tokens))
+ self.assertIn(token_id2, tokens)
+ self.assertIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_token(token_id1)
+ tokens = token_list_fn('testuserid')
+ self.assertIn(token_id2, tokens)
+ self.assertNotIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_token(token_id2)
+ tokens = token_list_fn('testuserid')
+ self.assertNotIn(token_id2, tokens)
+ self.assertNotIn(token_id1, tokens)
+
+ # tenant-specific tokens
+ tenant1 = uuid.uuid4().hex
+ tenant2 = uuid.uuid4().hex
+ token_id3, data = self.create_token_sample_data(tenant_id=tenant1)
+ token_id4, data = self.create_token_sample_data(tenant_id=tenant2)
+ # test for existing but empty tenant (LP:1078497)
+ token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT)
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(3, len(tokens))
+ self.assertNotIn(token_id1, tokens)
+ self.assertNotIn(token_id2, tokens)
+ self.assertIn(token_id3, tokens)
+ self.assertIn(token_id4, tokens)
+ self.assertIn(token_id5, tokens)
+ tokens = token_list_fn('testuserid', tenant2)
+ self.assertEqual(1, len(tokens))
+ self.assertNotIn(token_id1, tokens)
+ self.assertNotIn(token_id2, tokens)
+ self.assertNotIn(token_id3, tokens)
+ self.assertIn(token_id4, tokens)
+
+ def test_token_list(self):
+ self._test_token_list(
+ self.token_provider_api._persistence._list_tokens)
+
+ def test_token_list_trust(self):
+ trust_id = uuid.uuid4().hex
+ token_id5, data = self.create_token_sample_data(trust_id=trust_id)
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid', trust_id=trust_id)
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id5, tokens)
+
+ def test_get_token_404(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ uuid.uuid4().hex)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ None)
+
+ def test_delete_token_404(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.delete_token,
+ uuid.uuid4().hex)
+
+ def test_expired_token(self):
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data_ref, data)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id)
+
+ def test_null_expires_token(self):
+ token_id = uuid.uuid4().hex
+ data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ self.assertIsNotNone(data_ref['expires'])
+ new_data_ref = self.token_provider_api._persistence.get_token(token_id)
+
+ # MySQL doesn't store microseconds, so discard them before testing
+ data_ref['expires'] = data_ref['expires'].replace(microsecond=0)
+ new_data_ref['expires'] = new_data_ref['expires'].replace(
+ microsecond=0)
+
+ self.assertEqual(data_ref, new_data_ref)
+
+ def check_list_revoked_tokens(self, token_ids):
+ revoked_ids = [x['id']
+ for x in self.token_provider_api.list_revoked_tokens()]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ for token_id in token_ids:
+ self.assertIn(token_id, revoked_ids)
+
+ def delete_token(self):
+ token_id = uuid.uuid4().hex
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ self.token_provider_api._persistence.delete_token(token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ data_ref['id'])
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.delete_token,
+ data_ref['id'])
+ return token_id
+
+ def test_list_revoked_tokens_returns_empty_list(self):
+ revoked_ids = [x['id']
+ for x in self.token_provider_api.list_revoked_tokens()]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ self.assertEqual([], revoked_ids)
+
+ def test_list_revoked_tokens_for_single_token(self):
+ self.check_list_revoked_tokens([self.delete_token()])
+
+ def test_list_revoked_tokens_for_multiple_tokens(self):
+ self.check_list_revoked_tokens([self.delete_token()
+ for _ in six.moves.range(2)])
+
+ def test_flush_expired_token(self):
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data_ref, data)
+
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data_ref, data)
+
+ self.token_provider_api._persistence.flush_expired_tokens()
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id, tokens)
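+
+ # NOTE: a minimal sketch (an assumption about the persistence
+ # contract, not driver code from this change) of the predicate
+ # flush_expired_tokens() applies: only tokens whose 'expires' is in
+ # the past are purged.
+ @staticmethod
+ def _is_flushable(token_ref, now):
+     expires = token_ref.get('expires')
+     return expires is not None and expires <= now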
+
+ @tests.skip_if_cache_disabled('token')
+ def test_revocation_list_cache(self):
+ expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10)
+ token_id = uuid.uuid4().hex
+ token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ token2_id = uuid.uuid4().hex
+ token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ # Create 2 Tokens.
+ self.token_provider_api._persistence.create_token(token_id,
+ token_data)
+ self.token_provider_api._persistence.create_token(token2_id,
+ token2_data)
+ # Verify the revocation list is empty.
+ self.assertEqual(
+ [], self.token_provider_api._persistence.list_revoked_tokens())
+ self.assertEqual([], self.token_provider_api.list_revoked_tokens())
+ # Delete a token directly, bypassing the manager.
+ self.token_provider_api._persistence.driver.delete_token(token_id)
+ # Verify the revocation list is still empty.
+ self.assertEqual(
+ [], self.token_provider_api._persistence.list_revoked_tokens())
+ self.assertEqual([], self.token_provider_api.list_revoked_tokens())
+ # Invalidate the revocation list.
+ self.token_provider_api._persistence.invalidate_revocation_list()
+ # Verify the deleted token is in the revocation list.
+ revoked_ids = [x['id']
+ for x in self.token_provider_api.list_revoked_tokens()]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ self.assertIn(token_id, revoked_ids)
+ # Delete the second token, through the manager
+ self.token_provider_api._persistence.delete_token(token2_id)
+ revoked_ids = [x['id']
+ for x in self.token_provider_api.list_revoked_tokens()]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ # Verify both tokens are in the revocation list.
+ self.assertIn(token_id, revoked_ids)
+ self.assertIn(token2_id, revoked_ids)
+
+ def _test_predictable_revoked_pki_token_id(self, hash_fn):
+ token_id = self._create_token_id()
+ token_id_hash = hash_fn(token_id).hexdigest()
+ token = {'user': {'id': uuid.uuid4().hex}}
+
+ self.token_provider_api._persistence.create_token(token_id, token)
+ self.token_provider_api._persistence.delete_token(token_id)
+
+ revoked_ids = [x['id']
+ for x in self.token_provider_api.list_revoked_tokens()]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ self.assertIn(token_id_hash, revoked_ids)
+ self.assertNotIn(token_id, revoked_ids)
+ for t in self.token_provider_api._persistence.list_revoked_tokens():
+ self.assertIn('expires', t)
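+
+ # NOTE: a minimal helper sketch (an assumption, not code from this
+ # change) of the id the assertions above expect: PKI token ids are
+ # long CMS blobs, so the revocation list stores their digest rather
+ # than the raw id.
+ @staticmethod
+ def _expected_revoked_id(token_id, hash_fn=hashlib.md5):
+     return hash_fn(token_id).hexdigest()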
+
+ def test_predictable_revoked_pki_token_id_default(self):
+ self._test_predictable_revoked_pki_token_id(hashlib.md5)
+
+ def test_predictable_revoked_pki_token_id_sha256(self):
+ self.config_fixture.config(group='token', hash_algorithm='sha256')
+ self._test_predictable_revoked_pki_token_id(hashlib.sha256)
+
+ def test_predictable_revoked_uuid_token_id(self):
+ token_id = uuid.uuid4().hex
+ token = {'user': {'id': uuid.uuid4().hex}}
+
+ self.token_provider_api._persistence.create_token(token_id, token)
+ self.token_provider_api._persistence.delete_token(token_id)
+
+ revoked_tokens = self.token_provider_api.list_revoked_tokens()
+ revoked_ids = [x['id'] for x in revoked_tokens]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ self.assertIn(token_id, revoked_ids)
+ for t in revoked_tokens:
+ self.assertIn('expires', t)
+
+ def test_create_unicode_token_id(self):
+ token_id = six.text_type(self._create_token_id())
+ self.create_token_sample_data(token_id=token_id)
+ self.token_provider_api._persistence.get_token(token_id)
+
+ def test_create_unicode_user_id(self):
+ user_id = six.text_type(uuid.uuid4().hex)
+ token_id, data = self.create_token_sample_data(user_id=user_id)
+ self.token_provider_api._persistence.get_token(token_id)
+
+ def test_token_expire_timezone(self):
+
+ @test_utils.timezone
+ def _create_token(expire_time):
+ token_id = uuid.uuid4().hex
+ user_id = six.text_type(uuid.uuid4().hex)
+ return self.create_token_sample_data(token_id=token_id,
+ user_id=user_id,
+ expires=expire_time)
+
+ for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
+ test_utils.TZ = 'UTC' + d
+ expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
+ token_id, data_in = _create_token(expire_time)
+ data_get = self.token_provider_api._persistence.get_token(token_id)
+
+ self.assertEqual(data_in['id'], data_get['id'],
+ 'TZ=%s' % test_utils.TZ)
+
+ expire_time_expired = (
+ timeutils.utcnow() + datetime.timedelta(minutes=-1))
+ token_id, data_in = _create_token(expire_time_expired)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ data_in['id'])
+
+
+class TokenCacheInvalidation(object):
+ def _create_test_data(self):
+ self.user = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
+ self.tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
+
+ # Create an equivalent of a scoped token
+ token_dict = {'user': self.user, 'tenant': self.tenant,
+ 'metadata': {}, 'id': 'placeholder'}
+ token_id, data = self.token_provider_api.issue_v2_token(token_dict)
+ self.scoped_token_id = token_id
+
+ # ...and an unscoped one
+ token_dict = {'user': self.user, 'tenant': None,
+ 'metadata': {}, 'id': 'placeholder'}
+ token_id, data = self.token_provider_api.issue_v2_token(token_dict)
+ self.unscoped_token_id = token_id
+
+ # Validate them, in the various ways possible - this will load the
+ # responses into the token cache.
+ self._check_scoped_tokens_are_valid()
+ self._check_unscoped_tokens_are_valid()
+
+ def _check_unscoped_tokens_are_invalid(self):
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ self.unscoped_token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ self.unscoped_token_id)
+
+ def _check_scoped_tokens_are_invalid(self):
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ self.scoped_token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ self.scoped_token_id,
+ self.tenant['id'])
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ self.scoped_token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_v2_token,
+ self.scoped_token_id,
+ self.tenant['id'])
+
+ def _check_scoped_tokens_are_valid(self):
+ self.token_provider_api.validate_token(self.scoped_token_id)
+ self.token_provider_api.validate_token(
+ self.scoped_token_id, belongs_to=self.tenant['id'])
+ self.token_provider_api.validate_v2_token(self.scoped_token_id)
+ self.token_provider_api.validate_v2_token(
+ self.scoped_token_id, belongs_to=self.tenant['id'])
+
+ def _check_unscoped_tokens_are_valid(self):
+ self.token_provider_api.validate_token(self.unscoped_token_id)
+ self.token_provider_api.validate_v2_token(self.unscoped_token_id)
+
+ def test_delete_unscoped_token(self):
+ self.token_provider_api._persistence.delete_token(
+ self.unscoped_token_id)
+ self._check_unscoped_tokens_are_invalid()
+ self._check_scoped_tokens_are_valid()
+
+ def test_delete_scoped_token_by_id(self):
+ self.token_provider_api._persistence.delete_token(self.scoped_token_id)
+ self._check_scoped_tokens_are_invalid()
+ self._check_unscoped_tokens_are_valid()
+
+ def test_delete_scoped_token_by_user(self):
+ self.token_provider_api._persistence.delete_tokens(self.user['id'])
+ # Since we are deleting all tokens for this user, they should all
+ # now be invalid.
+ self._check_scoped_tokens_are_invalid()
+ self._check_unscoped_tokens_are_invalid()
+
+ def test_delete_scoped_token_by_user_and_tenant(self):
+ self.token_provider_api._persistence.delete_tokens(
+ self.user['id'],
+ tenant_id=self.tenant['id'])
+ self._check_scoped_tokens_are_invalid()
+ self._check_unscoped_tokens_are_valid()
+
+
+class TrustTests(object):
+ def create_sample_trust(self, new_id, remaining_uses=None):
+ self.trustor = self.user_foo
+ self.trustee = self.user_two
+ trust_data = self.trust_api.create_trust(
+ new_id,
+ {'trustor_user_id': self.trustor['id'],
+ 'trustee_user_id': self.user_two['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'expires_at': timeutils.parse_isotime(
+ '2031-02-18T18:10:00Z'),
+ 'impersonation': True,
+ 'remaining_uses': remaining_uses},
+ roles=[{"id": "member"},
+ {"id": "other"},
+ {"id": "browser"}])
+ return trust_data
+
+ def test_delete_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ trust_id = trust_data['id']
+ self.assertIsNotNone(trust_data)
+ trust_data = self.trust_api.get_trust(trust_id)
+ self.assertEqual(new_id, trust_data['id'])
+ self.trust_api.delete_trust(trust_id)
+ self.assertIsNone(self.trust_api.get_trust(trust_id))
+
+ def test_delete_trust_not_found(self):
+ trust_id = uuid.uuid4().hex
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.delete_trust,
+ trust_id)
+
+ def test_get_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ trust_id = trust_data['id']
+ self.assertIsNotNone(trust_data)
+ trust_data = self.trust_api.get_trust(trust_id)
+ self.assertEqual(new_id, trust_data['id'])
+ self.trust_api.delete_trust(trust_data['id'])
+
+ def test_get_deleted_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ self.assertIsNotNone(trust_data)
+ self.assertIsNone(trust_data['deleted_at'])
+ self.trust_api.delete_trust(new_id)
+ self.assertIsNone(self.trust_api.get_trust(new_id))
+ deleted_trust = self.trust_api.get_trust(trust_data['id'],
+ deleted=True)
+ self.assertEqual(trust_data['id'], deleted_trust['id'])
+ self.assertIsNotNone(deleted_trust.get('deleted_at'))
+
+ def test_create_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+
+ self.assertEqual(new_id, trust_data['id'])
+ self.assertEqual(self.trustee['id'], trust_data['trustee_user_id'])
+ self.assertEqual(self.trustor['id'], trust_data['trustor_user_id'])
+ self.assertGreater(timeutils.normalize_time(trust_data['expires_at']),
+ timeutils.utcnow())
+
+ self.assertEqual([{'id': 'member'},
+ {'id': 'other'},
+ {'id': 'browser'}], trust_data['roles'])
+
+ def test_list_trust_by_trustee(self):
+ for _ in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id'])
+ self.assertEqual(3, len(trusts))
+ self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id'])
+ trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id'])
+ self.assertEqual(0, len(trusts))
+
+ def test_list_trust_by_trustor(self):
+ for _ in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id'])
+ self.assertEqual(3, len(trusts))
+ self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id'])
+ trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id'])
+ self.assertEqual(0, len(trusts))
+
+ def test_list_trusts(self):
+ for _ in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts()
+ self.assertEqual(3, len(trusts))
+
+ def test_trust_has_remaining_uses_positive(self):
+ # create a trust with limited uses, check that we have uses left
+ trust_data = self.create_sample_trust(uuid.uuid4().hex,
+ remaining_uses=5)
+ self.assertEqual(5, trust_data['remaining_uses'])
+ # create a trust with unlimited uses, check that we have uses left
+ trust_data = self.create_sample_trust(uuid.uuid4().hex)
+ self.assertIsNone(trust_data['remaining_uses'])
+
+ def test_trust_has_remaining_uses_negative(self):
+ # try to create a trust with no remaining uses, check that it fails
+ self.assertRaises(exception.ValidationError,
+ self.create_sample_trust,
+ uuid.uuid4().hex,
+ remaining_uses=0)
+ # try to create a trust with negative remaining uses,
+ # check that it fails
+ self.assertRaises(exception.ValidationError,
+ self.create_sample_trust,
+ uuid.uuid4().hex,
+ remaining_uses=-12)
+
+ def test_consume_use(self):
+ # consume a trust repeatedly until it has no uses anymore
+ trust_data = self.create_sample_trust(uuid.uuid4().hex,
+ remaining_uses=2)
+ self.trust_api.consume_use(trust_data['id'])
+ t = self.trust_api.get_trust(trust_data['id'])
+ self.assertEqual(1, t['remaining_uses'])
+ self.trust_api.consume_use(trust_data['id'])
+ # This was the last use, the trust isn't available anymore
+ self.assertIsNone(self.trust_api.get_trust(trust_data['id']))
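+
+ # NOTE: a pure-Python sketch (an assumption, not the trust driver's
+ # code) of the consume_use contract exercised above: each call
+ # decrements remaining_uses, the trust disappears once the count hits
+ # zero, and None means unlimited uses.
+ @staticmethod
+ def _remaining_after_use(remaining_uses):
+     if remaining_uses is None:
+         return None  # unlimited-use trusts are never exhausted
+     return remaining_uses - 1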
+
+
+class CatalogTests(object):
+
+ _legacy_endpoint_id_in_endpoint = False
+ _enabled_default_to_true_when_creating_endpoint = False
+
+ def test_region_crud(self):
+ # create
+ region_id = '0' * 255
+ new_region = {
+ 'id': region_id,
+ 'description': uuid.uuid4().hex,
+ }
+ res = self.catalog_api.create_region(
+ new_region.copy())
+ # Ensure that we don't need to supply a parent_region_id in the
+ # original ref dict, but that it is returned from the endpoint with a
+ # None value.
+ expected_region = new_region.copy()
+ expected_region['parent_region_id'] = None
+ self.assertDictEqual(res, expected_region)
+
+ # Test adding another region with the one above
+ # as its parent. We will check below whether deleting
+ # the parent successfully deletes any child regions.
+ parent_region_id = region_id
+ region_id = uuid.uuid4().hex
+ new_region = {
+ 'id': region_id,
+ 'description': uuid.uuid4().hex,
+ 'parent_region_id': parent_region_id,
+ }
+ res = self.catalog_api.create_region(
+ new_region.copy())
+ self.assertDictEqual(new_region, res)
+
+ # list
+ regions = self.catalog_api.list_regions()
+ self.assertThat(regions, matchers.HasLength(2))
+ region_ids = [x['id'] for x in regions]
+ self.assertIn(parent_region_id, region_ids)
+ self.assertIn(region_id, region_ids)
+
+ # update
+ region_desc_update = {'description': uuid.uuid4().hex}
+ res = self.catalog_api.update_region(region_id, region_desc_update)
+ expected_region = new_region.copy()
+ expected_region['description'] = region_desc_update['description']
+ self.assertDictEqual(expected_region, res)
+
+ # delete
+ self.catalog_api.delete_region(parent_region_id)
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.delete_region,
+ parent_region_id)
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ parent_region_id)
+ # Ensure the child is also gone...
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_id)
+
+ def _create_region_with_parent_id(self, parent_id=None):
+ new_region = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'parent_region_id': parent_id
+ }
+ self.catalog_api.create_region(
+ new_region)
+ return new_region
+
+ def test_list_regions_filtered_by_parent_region_id(self):
+ new_region = self._create_region_with_parent_id()
+ parent_id = new_region['id']
+ new_region = self._create_region_with_parent_id(parent_id)
+ new_region = self._create_region_with_parent_id(parent_id)
+
+ # filter by parent_region_id
+ hints = driver_hints.Hints()
+ hints.add_filter('parent_region_id', parent_id)
+ regions = self.catalog_api.list_regions(hints)
+ for region in regions:
+ self.assertEqual(parent_id, region['parent_region_id'])
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_cache_layer_region_crud(self):
+ region_id = uuid.uuid4().hex
+ new_region = {
+ 'id': region_id,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_region(new_region.copy())
+ updated_region = copy.deepcopy(new_region)
+ updated_region['description'] = uuid.uuid4().hex
+ # cache the result
+ self.catalog_api.get_region(region_id)
+ # update the region bypassing catalog_api
+ self.catalog_api.driver.update_region(region_id, updated_region)
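+ # the stale cached region is returned until the cache entry is
+ # invalidated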
+ self.assertDictContainsSubset(new_region,
+ self.catalog_api.get_region(region_id))
+ self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
+ self.assertDictContainsSubset(updated_region,
+ self.catalog_api.get_region(region_id))
+ # delete the region
+ self.catalog_api.driver.delete_region(region_id)
+ # still get the old region
+ self.assertDictContainsSubset(updated_region,
+ self.catalog_api.get_region(region_id))
+ self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region, region_id)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_region(self):
+ region_id = uuid.uuid4().hex
+ new_region = {
+ 'id': region_id,
+ 'description': uuid.uuid4().hex
+ }
+ self.catalog_api.create_region(new_region)
+
+ # cache the region
+ self.catalog_api.get_region(region_id)
+
+ # update the region via catalog_api
+ new_description = {'description': uuid.uuid4().hex}
+ self.catalog_api.update_region(region_id, new_description)
+
+ # assert that we can get the new region
+ current_region = self.catalog_api.get_region(region_id)
+ self.assertEqual(new_description['description'],
+ current_region['description'])
+
+ def test_create_region_with_duplicate_id(self):
+ region_id = uuid.uuid4().hex
+ new_region = {
+ 'id': region_id,
+ 'description': uuid.uuid4().hex
+ }
+ self.catalog_api.create_region(new_region)
+ # Create region again with duplicate id
+ self.assertRaises(exception.Conflict,
+ self.catalog_api.create_region,
+ new_region)
+
+ def test_get_region_404(self):
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ uuid.uuid4().hex)
+
+ def test_delete_region_404(self):
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.delete_region,
+ uuid.uuid4().hex)
+
+ def test_create_region_invalid_parent_region_404(self):
+ region_id = uuid.uuid4().hex
+ new_region = {
+ 'id': region_id,
+ 'description': uuid.uuid4().hex,
+ 'parent_region_id': 'nonexisting'
+ }
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.create_region,
+ new_region)
+
+ def test_avoid_creating_circular_references_in_regions_update(self):
+ region_one = self._create_region_with_parent_id()
+
+ # self circle: region_one->region_one
+ self.assertRaises(exception.CircularRegionHierarchyError,
+ self.catalog_api.update_region,
+ region_one['id'],
+ {'parent_region_id': region_one['id']})
+
+ # region_one->region_two->region_one
+ region_two = self._create_region_with_parent_id(region_one['id'])
+ self.assertRaises(exception.CircularRegionHierarchyError,
+ self.catalog_api.update_region,
+ region_one['id'],
+ {'parent_region_id': region_two['id']})
+
+ # region_one->region_two->region_three->region_four->region_two
+ region_three = self._create_region_with_parent_id(region_two['id'])
+ region_four = self._create_region_with_parent_id(region_three['id'])
+ self.assertRaises(exception.CircularRegionHierarchyError,
+ self.catalog_api.update_region,
+ region_two['id'],
+ {'parent_region_id': region_four['id']})
+
+ @mock.patch.object(core.Driver,
+ "_ensure_no_circle_in_hierarchical_regions")
+ def test_circular_regions_can_be_deleted(self, mock_ensure_no_circle):
+ # turn off the enforcement so that cycles can be created for the test
+ mock_ensure_no_circle.return_value = None
+
+ region_one = self._create_region_with_parent_id()
+
+ # self circle: region_one->region_one
+ self.catalog_api.update_region(
+ region_one['id'],
+ {'parent_region_id': region_one['id']})
+ self.catalog_api.delete_region(region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_one['id'])
+
+ # region_one->region_two->region_one
+ region_one = self._create_region_with_parent_id()
+ region_two = self._create_region_with_parent_id(region_one['id'])
+ self.catalog_api.update_region(
+ region_one['id'],
+ {'parent_region_id': region_two['id']})
+ self.catalog_api.delete_region(region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_two['id'])
+
+ # region_one->region_two->region_three->region_one
+ region_one = self._create_region_with_parent_id()
+ region_two = self._create_region_with_parent_id(region_one['id'])
+ region_three = self._create_region_with_parent_id(region_two['id'])
+ self.catalog_api.update_region(
+ region_one['id'],
+ {'parent_region_id': region_three['id']})
+ self.catalog_api.delete_region(region_two['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_two['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_three['id'])
+
+ def test_service_crud(self):
+ # create
+ service_id = uuid.uuid4().hex
+ new_service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ res = self.catalog_api.create_service(
+ service_id,
+ new_service.copy())
+ new_service['enabled'] = True
+ self.assertDictEqual(new_service, res)
+
+ # list
+ services = self.catalog_api.list_services()
+ self.assertIn(service_id, [x['id'] for x in services])
+
+ # update
+ service_name_update = {'name': uuid.uuid4().hex}
+ res = self.catalog_api.update_service(service_id, service_name_update)
+ expected_service = new_service.copy()
+ expected_service['name'] = service_name_update['name']
+ self.assertDictEqual(expected_service, res)
+
+ # delete
+ self.catalog_api.delete_service(service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ service_id)
+
+ def _create_random_service(self):
+ service_id = uuid.uuid4().hex
+ new_service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ return self.catalog_api.create_service(service_id, new_service.copy())
+
+ def test_service_filtering(self):
+ target_service = self._create_random_service()
+ unrelated_service1 = self._create_random_service()
+ unrelated_service2 = self._create_random_service()
+
+ # filter by type
+ hint_for_type = driver_hints.Hints()
+ hint_for_type.add_filter(name="type", value=target_service['type'])
+ services = self.catalog_api.list_services(hint_for_type)
+
+ self.assertEqual(1, len(services))
+ filtered_service = services[0]
+ self.assertEqual(target_service['type'], filtered_service['type'])
+ self.assertEqual(target_service['id'], filtered_service['id'])
+
+ # filter should have been removed, since it was already used by the
+ # backend
+ self.assertEqual(0, len(hint_for_type.filters))
+
+ # the backend shouldn't filter by name, since this is handled by the
+ # front end
+ hint_for_name = driver_hints.Hints()
+ hint_for_name.add_filter(name="name", value=target_service['name'])
+ services = self.catalog_api.list_services(hint_for_name)
+
+ self.assertEqual(3, len(services))
+
+ # filter should still be there, since it wasn't used by the backend
+ self.assertEqual(1, len(hint_for_name.filters))
+
+ self.catalog_api.delete_service(target_service['id'])
+ self.catalog_api.delete_service(unrelated_service1['id'])
+ self.catalog_api.delete_service(unrelated_service2['id'])
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_cache_layer_service_crud(self):
+ service_id = uuid.uuid4().hex
+ new_service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ res = self.catalog_api.create_service(
+ service_id,
+ new_service.copy())
+ new_service['enabled'] = True
+ self.assertDictEqual(new_service, res)
+ self.catalog_api.get_service(service_id)
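+ # reading the service through catalog_api populates the cache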
+ updated_service = copy.deepcopy(new_service)
+ updated_service['description'] = uuid.uuid4().hex
+ # update bypassing catalog api
+ self.catalog_api.driver.update_service(service_id, updated_service)
+ self.assertDictContainsSubset(new_service,
+ self.catalog_api.get_service(service_id))
+ self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
+ self.assertDictContainsSubset(updated_service,
+ self.catalog_api.get_service(service_id))
+
+ # delete bypassing catalog api
+ self.catalog_api.driver.delete_service(service_id)
+ self.assertDictContainsSubset(updated_service,
+ self.catalog_api.get_service(service_id))
+ self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ service_id)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_service(self):
+ service_id = uuid.uuid4().hex
+ new_service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(
+ service_id,
+ new_service.copy())
+
+ # cache the service
+ self.catalog_api.get_service(service_id)
+
+ # update the service via catalog api
+ new_type = {'type': uuid.uuid4().hex}
+ self.catalog_api.update_service(service_id, new_type)
+
+ # assert that we can get the new service
+ current_service = self.catalog_api.get_service(service_id)
+ self.assertEqual(new_type['type'], current_service['type'])
+
+ def test_delete_service_with_endpoint(self):
+ # create a service
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # deleting the service should also delete the endpoint
+ self.catalog_api.delete_service(service['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+
+ def test_cache_layer_delete_service_with_endpoint(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': None,
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ # cache the result
+ self.catalog_api.get_service(service['id'])
+ self.catalog_api.get_endpoint(endpoint['id'])
+ # delete the service bypassing catalog api
+ self.catalog_api.driver.delete_service(service['id'])
+ self.assertDictContainsSubset(endpoint,
+ self.catalog_api.
+ get_endpoint(endpoint['id']))
+ self.assertDictContainsSubset(service,
+ self.catalog_api.
+ get_service(service['id']))
+ self.catalog_api.get_endpoint.invalidate(self.catalog_api,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+ # multiple endpoints associated with a service
+ second_endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': None,
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_service(service['id'], service)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.catalog_api.create_endpoint(second_endpoint['id'],
+ second_endpoint)
+ self.catalog_api.delete_service(service['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ second_endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ second_endpoint['id'])
+
+ def test_get_service_404(self):
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ uuid.uuid4().hex)
+
+ def test_delete_service_404(self):
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ uuid.uuid4().hex)
+
+ def test_create_endpoint_nonexistent_service(self):
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.create_endpoint,
+ endpoint['id'],
+ endpoint)
+
+ def test_update_endpoint_nonexistent_service(self):
+ dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
+ self._create_endpoints())
+ new_endpoint = {
+ 'service_id': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.update_endpoint,
+ enabled_endpoint['id'],
+ new_endpoint)
+
+ def test_create_endpoint_nonexistent_region(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ 'region_id': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.create_endpoint,
+ endpoint['id'],
+ endpoint)
+
+ def test_update_endpoint_nonexistent_region(self):
+ dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
+ self._create_endpoints())
+ new_endpoint = {
+ 'region_id': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.update_endpoint,
+ enabled_endpoint['id'],
+ new_endpoint)
+
+ def test_get_endpoint_404(self):
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ uuid.uuid4().hex)
+
+ def test_delete_endpoint_404(self):
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ uuid.uuid4().hex)
+
+ def test_create_endpoint(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': None,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ }
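+ # the test passes if endpoint creation completes without raising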
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ def test_update_endpoint(self):
+ dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = (
+ self._create_endpoints())
+ res = self.catalog_api.update_endpoint(endpoint_ref['id'],
+ {'interface': 'private'})
+ expected_endpoint = endpoint_ref.copy()
+ expected_endpoint['interface'] = 'private'
+ if self._legacy_endpoint_id_in_endpoint:
+ expected_endpoint['legacy_endpoint_id'] = None
+ if self._enabled_default_to_true_when_creating_endpoint:
+ expected_endpoint['enabled'] = True
+ self.assertDictEqual(expected_endpoint, res)
+
+ def _create_endpoints(self):
+ # Creates a service and 2 endpoints for the service in the same region.
+ # The 'public' interface is enabled and the 'internal' interface is
+ # disabled.
+
+ def create_endpoint(service_id, region, **kwargs):
+ id_ = uuid.uuid4().hex
+ ref = {
+ 'id': id_,
+ 'interface': 'public',
+ 'region_id': region,
+ 'service_id': service_id,
+ 'url': 'http://localhost/%s' % uuid.uuid4().hex,
+ }
+ ref.update(kwargs)
+ self.catalog_api.create_endpoint(id_, ref)
+ return ref
+
+ # Create a service for use with the endpoints.
+ service_id = uuid.uuid4().hex
+ service_ref = {
+ 'id': service_id,
+ 'name': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service_id, service_ref)
+
+ region = {'id': uuid.uuid4().hex}
+ self.catalog_api.create_region(region)
+
+ # Create endpoints
+ enabled_endpoint_ref = create_endpoint(service_id, region['id'])
+ disabled_endpoint_ref = create_endpoint(
+ service_id, region['id'], enabled=False, interface='internal')
+
+ return service_ref, enabled_endpoint_ref, disabled_endpoint_ref
+
+ def test_get_catalog_endpoint_disabled(self):
+ """Get back only enabled endpoints when get the v2 catalog."""
+
+ service_ref, enabled_endpoint_ref, dummy_disabled_endpoint_ref = (
+ self._create_endpoints())
+
+ user_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_catalog(user_id, project_id)
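+ # the v2 catalog is a dict keyed by region and then service type;
+ # only the enabled endpoint should appear in the entry checked below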
+
+ exp_entry = {
+ 'id': enabled_endpoint_ref['id'],
+ 'name': service_ref['name'],
+ 'publicURL': enabled_endpoint_ref['url'],
+ }
+
+ region = enabled_endpoint_ref['region_id']
+ self.assertEqual(exp_entry, catalog[region][service_ref['type']])
+
+ def test_get_v3_catalog_endpoint_disabled(self):
+ """Get back only enabled endpoints when get the v3 catalog."""
+
+ enabled_endpoint_ref = self._create_endpoints()[1]
+
+ user_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_v3_catalog(user_id, project_id)
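+ # the v3 catalog is a list of services, each with an 'endpoints'
+ # list from which the disabled endpoint must have been filtered out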
+
+ endpoint_ids = [x['id'] for x in catalog[0]['endpoints']]
+ self.assertEqual([enabled_endpoint_ref['id']], endpoint_ids)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_endpoint(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint_id = uuid.uuid4().hex
+ endpoint = {
+ 'id': endpoint_id,
+ 'region_id': None,
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint_id, endpoint)
+
+ # cache the endpoint
+ self.catalog_api.get_endpoint(endpoint_id)
+
+ # update the endpoint via catalog api
+ new_url = {'url': uuid.uuid4().hex}
+ self.catalog_api.update_endpoint(endpoint_id, new_url)
+
+ # assert that we can get the new endpoint
+ current_endpoint = self.catalog_api.get_endpoint(endpoint_id)
+ self.assertEqual(new_url['url'], current_endpoint['url'])
+
+
+class PolicyTests(object):
+ def _new_policy_ref(self):
+ return {
+ 'id': uuid.uuid4().hex,
+ 'policy': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'endpoint_id': uuid.uuid4().hex,
+ }
+
+ def assertEqualPolicies(self, a, b):
+ self.assertEqual(a['id'], b['id'])
+ self.assertEqual(a['endpoint_id'], b['endpoint_id'])
+ self.assertEqual(a['policy'], b['policy'])
+ self.assertEqual(a['type'], b['type'])
+
+ def test_create(self):
+ ref = self._new_policy_ref()
+ res = self.policy_api.create_policy(ref['id'], ref)
+ self.assertEqualPolicies(ref, res)
+
+ def test_get(self):
+ ref = self._new_policy_ref()
+ res = self.policy_api.create_policy(ref['id'], ref)
+
+ res = self.policy_api.get_policy(ref['id'])
+ self.assertEqualPolicies(ref, res)
+
+ def test_list(self):
+ ref = self._new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+
+ res = self.policy_api.list_policies()
+ res = [x for x in res if x['id'] == ref['id']][0]
+ self.assertEqualPolicies(ref, res)
+
+ def test_update(self):
+ ref = self._new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+ orig = ref
+
+ ref = self._new_policy_ref()
+
+ # (cannot change policy ID)
+ self.assertRaises(exception.ValidationError,
+ self.policy_api.update_policy,
+ orig['id'],
+ ref)
+
+ ref['id'] = orig['id']
+ res = self.policy_api.update_policy(orig['id'], ref)
+ self.assertEqualPolicies(ref, res)
+
+ def test_delete(self):
+ ref = self._new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+
+ self.policy_api.delete_policy(ref['id'])
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.delete_policy,
+ ref['id'])
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.get_policy,
+ ref['id'])
+ res = self.policy_api.list_policies()
+ self.assertFalse(len([x for x in res if x['id'] == ref['id']]))
+
+ def test_get_policy_404(self):
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.get_policy,
+ uuid.uuid4().hex)
+
+ def test_update_policy_404(self):
+ ref = self._new_policy_ref()
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.update_policy,
+ ref['id'],
+ ref)
+
+ def test_delete_policy_404(self):
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.delete_policy,
+ uuid.uuid4().hex)
+
+
+class InheritanceTests(object):
+
+ def test_inherited_role_grants_for_user(self):
+ """Test inherited user roles.
+
+ Test Plan:
+
+ - Enable OS-INHERIT extension
+ - Create 3 roles
+ - Create a domain, with a project and a user
+ - Check no roles yet exist
+ - Assign a direct user role to the project and a (non-inherited)
+ user role to the domain
+ - Get a list of effective roles - should only get the one direct role
+ - Now add an inherited user role to the domain
+ - Get a list of effective roles - should have two roles, one
+ direct and one by virtue of the inherited user role
+ - Also get effective roles for the domain - the role marked as
+ inherited should not show up
+
+ """
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ role_list = []
+ for _ in range(3):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ # Create the first two roles - the domain one is not inherited
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+
+ # Now get the effective roles for the user and project, this
+ # should only include the direct role assignment on the project
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEqual(1, len(combined_list))
+ self.assertIn(role_list[0]['id'], combined_list)
+
+ # Now add an inherited role on the domain
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'],
+ inherited_to_projects=True)
+
+ # Now get the effective roles for the user and project again, this
+ # should now include the inherited role on the domain
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEqual(2, len(combined_list))
+ self.assertIn(role_list[0]['id'], combined_list)
+ self.assertIn(role_list[2]['id'], combined_list)
+
+ # Finally, check that the inherited role does not appear as a valid
+ # directly assigned role on the domain itself
+ combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
+ user1['id'], domain1['id'])
+ self.assertEqual(1, len(combined_role_list))
+ self.assertIn(role_list[1]['id'], combined_role_list)
+
+ def test_inherited_role_grants_for_group(self):
+ """Test inherited group roles.
+
+ Test Plan:
+
+ - Enable OS-INHERIT extension
+ - Create 4 roles
+ - Create a domain, with a project, user and two groups
+ - Make the user a member of both groups
+ - Check no roles yet exist
+ - Assign a direct user role to the project and a (non-inherited)
+ group role on the domain
+ - Get a list of effective roles - should only get the one direct role
+ - Now add two inherited group roles to the domain
+ - Get a list of effective roles - should have three roles, one
+ direct and two by virtue of inherited group roles
+
+ """
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group1 = self.identity_api.create_group(group1)
+ group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'enabled': True}
+ group2 = self.identity_api.create_group(group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ # Create two roles - the domain one is not inherited
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+
+ # Now get the effective roles for the user and project, this
+ # should only include the direct role assignment on the project
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEqual(1, len(combined_list))
+ self.assertIn(role_list[0]['id'], combined_list)
+
+ # Now add two more group roles, both inherited, to the domain
+ self.assignment_api.create_grant(group_id=group2['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group2['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[3]['id'],
+ inherited_to_projects=True)
+
+ # Now get the effective roles for the user and project again, this
+ # should now include the inherited roles on the domain
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEqual(3, len(combined_list))
+ self.assertIn(role_list[0]['id'], combined_list)
+ self.assertIn(role_list[2]['id'], combined_list)
+ self.assertIn(role_list[3]['id'], combined_list)
+
+ def test_list_projects_for_user_with_inherited_grants(self):
+ """Test inherited user roles.
+
+ Test Plan:
+
+ - Enable OS-INHERIT extension
+ - Create a domain, with two projects and a user
+ - Assign an inherited user role on the domain, as well as a direct
+ user role to a separate project in a different domain
+ - Get a list of projects for user, should return all three projects
+
+ """
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project2['id'], project2)
+
+ # Create 2 grants, one on a project and one inherited grant
+ # on the domain
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain['id'],
+ role_id=self.role_admin['id'],
+ inherited_to_projects=True)
+ # Should get back all three projects, one by virtue of the direct
+ # grant, plus both projects in the domain
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertEqual(3, len(user_projects))
+
+ def test_list_projects_for_user_with_inherited_user_project_grants(self):
+ """Test inherited role assignments for users on nested projects.
+
+ Test Plan:
+
+ - Enable OS-INHERIT extension
+ - Create a hierarchy of projects with one root and one leaf project
+ - Assign an inherited user role on root project
+ - Assign a non-inherited user role on root project
+ - Get a list of projects for user, should return both projects
+ - Disable OS-INHERIT extension
+ - Get a list of projects for user, should return only root project
+
+ """
+ # Enable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ root_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None}
+ self.resource_api.create_project(root_project['id'], root_project)
+ leaf_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': root_project['id']}
+ self.resource_api.create_project(leaf_project['id'], leaf_project)
+
+ user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
+ user = self.identity_api.create_user(user)
+
+ # Grant inherited user role
+ self.assignment_api.create_grant(user_id=user['id'],
+ project_id=root_project['id'],
+ role_id=self.role_admin['id'],
+ inherited_to_projects=True)
+ # Grant non-inherited user role
+ self.assignment_api.create_grant(user_id=user['id'],
+ project_id=root_project['id'],
+ role_id=self.role_member['id'])
+ # Should get back both projects: the root project because of the
+ # direct role assignment, and the leaf project via the inherited one
+ user_projects = self.assignment_api.list_projects_for_user(user['id'])
+ self.assertEqual(2, len(user_projects))
+ self.assertIn(root_project, user_projects)
+ self.assertIn(leaf_project, user_projects)
+
+ # Disable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=False)
+ # Should get back just the root project - due to the direct role assignment
+ user_projects = self.assignment_api.list_projects_for_user(user['id'])
+ self.assertEqual(1, len(user_projects))
+ self.assertIn(root_project, user_projects)
+
+ def test_list_projects_for_user_with_inherited_group_grants(self):
+ """Test inherited group roles.
+
+ Test Plan:
+
+ - Enable OS-INHERIT extension
+ - Create two domains, each with two projects
+ - Create a user and group
+ - Make the user a member of the group
+ - Assign a user role to two separate projects, an inherited
+ user role on one domain and an inherited group role on
+ the other domain
+ - Get a list of projects for the user; it should return both pairs of
+ projects from the two domains, plus the one separate project
+
+ """
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project2['id'], project2)
+ project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.resource_api.create_project(project3['id'], project3)
+ project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.resource_api.create_project(project4['id'], project4)
+ user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = self.identity_api.create_group(group1)
+ self.identity_api.add_user_to_group(user1['id'], group1['id'])
+
+ # Create 4 grants:
+ # - one user grant on a project in domain2
+ # - one user grant on a project in the default domain
+ # - one inherited user grant on domain
+ # - one inherited group grant on domain2
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project3['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain['id'],
+ role_id=self.role_admin['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=self.role_admin['id'],
+ inherited_to_projects=True)
+ # Should get back all five projects, but without a duplicate for
+ # project3 (since it has both a direct user role and an inherited role)
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertEqual(5, len(user_projects))
+
+ def test_list_projects_for_user_with_inherited_group_project_grants(self):
+ """Test inherited role assignments for groups on nested projects.
+
+ Test Plan:
+
+ - Enable OS-INHERIT extension
+ - Create a hierarchy of projects with one root and one leaf project
+ - Assign an inherited group role on root project
+ - Assign a non-inherited group role on root project
+ - Get a list of projects for user, should return both projects
+ - Disable OS-INHERIT extension
+ - Get a list of projects for user, should return only root project
+
+ """
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ root_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None}
+ self.resource_api.create_project(root_project['id'], root_project)
+ leaf_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': root_project['id']}
+ self.resource_api.create_project(leaf_project['id'], leaf_project)
+
+ user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
+ user = self.identity_api.create_user(user)
+
+ group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
+ group = self.identity_api.create_group(group)
+ self.identity_api.add_user_to_group(user['id'], group['id'])
+
+ # Grant inherited group role
+ self.assignment_api.create_grant(group_id=group['id'],
+ project_id=root_project['id'],
+ role_id=self.role_admin['id'],
+ inherited_to_projects=True)
+ # Grant non-inherited group role
+ self.assignment_api.create_grant(group_id=group['id'],
+ project_id=root_project['id'],
+ role_id=self.role_member['id'])
+ # Should get back both projects: the root project because of the
+ # direct role assignment, and the leaf project via the inherited one
+ user_projects = self.assignment_api.list_projects_for_user(user['id'])
+ self.assertEqual(2, len(user_projects))
+ self.assertIn(root_project, user_projects)
+ self.assertIn(leaf_project, user_projects)
+
+ # Disable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=False)
+ # Should get back just the root project - due to the direct role assignment
+ user_projects = self.assignment_api.list_projects_for_user(user['id'])
+ self.assertEqual(1, len(user_projects))
+ self.assertIn(root_project, user_projects)
+
+
+class FilterTests(filtering.FilterTests):
+ def test_list_entities_filtered(self):
+ for entity in ['user', 'group', 'project']:
+ # Create 20 entities
+ entity_list = self._create_test_data(entity, 20)
+
+ # Try filtering to get one exact item out of the list
+ hints = driver_hints.Hints()
+ hints.add_filter('name', entity_list[10]['name'])
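+ # _list_entities(entity) returns the list method for that entity
+ # type, which is then called with the filter hints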
+ entities = self._list_entities(entity)(hints=hints)
+ self.assertEqual(1, len(entities))
+ self.assertEqual(entities[0]['id'], entity_list[10]['id'])
+ # Check the driver has removed the filter from the list hints
+ self.assertFalse(hints.get_exact_filter_by_name('name'))
+ self._delete_test_data(entity, entity_list)
+
+ def test_list_users_inexact_filtered(self):
+ # Create 20 users, some with specific names. We set the names at create
+ # time (rather than updating them), since the LDAP driver does not
+ # support name updates.
+ user_name_data = {
+ # user index: name for user
+ 5: 'The',
+ 6: 'The Ministry',
+ 7: 'The Ministry of',
+ 8: 'The Ministry of Silly',
+ 9: 'The Ministry of Silly Walks',
+ # ...and one useful for case insensitivity testing
+ 10: 'The ministry of silly walks OF'
+ }
+ user_list = self._create_test_data(
+ 'user', 20, domain_id=DEFAULT_DOMAIN_ID, name_dict=user_name_data)
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'ministry', comparator='contains')
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(5, len(users))
+ self._match_with_list(users, user_list,
+ list_start=6, list_end=11)
+ # TODO(henry-nash) Check inexact filter has been removed.
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'The', comparator='startswith')
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(6, len(users))
+ self._match_with_list(users, user_list,
+ list_start=5, list_end=11)
+ # TODO(henry-nash) Check inexact filter has been removed.
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'of', comparator='endswith')
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(2, len(users))
+ # We can't assume we will get back the users in any particular order
+ self.assertIn(user_list[7]['id'], [users[0]['id'], users[1]['id']])
+ self.assertIn(user_list[10]['id'], [users[0]['id'], users[1]['id']])
+ # TODO(henry-nash) Check inexact filter has been removed.
+
+ # TODO(henry-nash): Add some case sensitive tests. However,
+ # these would be hard to validate currently, since:
+ #
+ # For SQL, the issue is that MySQL, by default, is installed in
+ # case insensitive mode (which is what is run by default for our
+ # SQL backend tests). For production deployments, OpenStack
+ # assumes a case sensitive database. For these tests, therefore, we
+ # need to be able to check the sensitivity of the database so as to
+ # know whether to run case sensitive tests here.
+ #
+ # For LDAP/AD, although dependent on the schema being used, attributes
+ # are typically configured to be case aware, but not case sensitive.
+
+ self._delete_test_data('user', user_list)
+
+ def test_groups_for_user_filtered(self):
+ """Test use of filtering doesn't break groups_for_user listing.
+
+ Some backends may use filtering to achieve the list of groups for a
+ user, so test that it can combine a second filter.
+
+ Test Plan:
+
+ - Create 10 groups, some with names we can filter on
+ - Create 2 users
+ - Assign 1 of those users to most of the groups, including some of the
+ well known named ones
+ - Assign the other user to other groups as spoilers
+ - Ensure that when we list groups for users with a filter on the group
+ name, both restrictions have been enforced on what is returned.
+
+ """
+
+ number_of_groups = 10
+ group_name_data = {
+ # entity index: name for entity
+ 5: 'The',
+ 6: 'The Ministry',
+ 9: 'The Ministry of Silly Walks',
+ }
+ group_list = self._create_test_data(
+ 'group', number_of_groups,
+ domain_id=DEFAULT_DOMAIN_ID, name_dict=group_name_data)
+ user_list = self._create_test_data('user', 2)
+
+ for group in range(7):
+ # Create memberships, including two out of the three groups
+ # with well known names
+ self.identity_api.add_user_to_group(user_list[0]['id'],
+ group_list[group]['id'])
+ # ...and some spoiler memberships
+ for group in range(7, number_of_groups):
+ self.identity_api.add_user_to_group(user_list[1]['id'],
+ group_list[group]['id'])
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'The', comparator='startswith')
+ groups = self.identity_api.list_groups_for_user(
+ user_list[0]['id'], hints=hints)
+ # We should only get back 2 out of the 3 groups that start with 'The'
+ # hence showing that both "filters" have been applied
+ self.assertThat(len(groups), matchers.Equals(2))
+ self.assertIn(group_list[5]['id'], [groups[0]['id'], groups[1]['id']])
+ self.assertIn(group_list[6]['id'], [groups[0]['id'], groups[1]['id']])
+ self._delete_test_data('user', user_list)
+ self._delete_test_data('group', group_list)
+
+
+class LimitTests(filtering.FilterTests):
+ ENTITIES = ['user', 'group', 'project']
+
+ def setUp(self):
+ """Setup for Limit Test Cases."""
+
+ self.domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(self.domain1['id'], self.domain1)
+ self.addCleanup(self.clean_up_domain)
+
+ self.entity_lists = {}
+ self.domain1_entity_lists = {}
+
+ for entity in self.ENTITIES:
+ # Create 20 entities, 14 of which are in domain1
+ self.entity_lists[entity] = self._create_test_data(entity, 6)
+ self.domain1_entity_lists[entity] = self._create_test_data(
+ entity, 14, self.domain1['id'])
+ self.addCleanup(self.clean_up_entities)
+
+ def clean_up_domain(self):
+ """Clean up domain test data from Limit Test Cases."""
+
+ self.domain1['enabled'] = False
+ self.resource_api.update_domain(self.domain1['id'], self.domain1)
+ self.resource_api.delete_domain(self.domain1['id'])
+ del self.domain1
+
+ def clean_up_entities(self):
+ """Clean up entity test data from Limit Test Cases."""
+ for entity in self.ENTITIES:
+ self._delete_test_data(entity, self.entity_lists[entity])
+ self._delete_test_data(entity, self.domain1_entity_lists[entity])
+ del self.entity_lists
+ del self.domain1_entity_lists
+
+ def _test_list_entity_filtered_and_limited(self, entity):
+ self.config_fixture.config(list_limit=10)
+ # Should get back just 10 entities in domain1
+ hints = driver_hints.Hints()
+ hints.add_filter('domain_id', self.domain1['id'])
+ entities = self._list_entities(entity)(hints=hints)
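+ # the driver records the limit it applied, and whether the list
+ # was truncated, in hints.limit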
+ self.assertEqual(hints.limit['limit'], len(entities))
+ self.assertTrue(hints.limit['truncated'])
+ self._match_with_list(entities, self.domain1_entity_lists[entity])
+
+ # Override with driver specific limit
+ if entity == 'project':
+ self.config_fixture.config(group='resource', list_limit=5)
+ else:
+ self.config_fixture.config(group='identity', list_limit=5)
+
+ # Should get back just 5 entities in domain1
+ hints = driver_hints.Hints()
+ hints.add_filter('domain_id', self.domain1['id'])
+ entities = self._list_entities(entity)(hints=hints)
+ self.assertEqual(hints.limit['limit'], len(entities))
+ self._match_with_list(entities, self.domain1_entity_lists[entity])
+
+ # Finally, let's pretend we want to get the full list of entities,
+ # even with the limits set, as part of some internal calculation.
+ # Calling the API without a hints list should achieve this, and
+ # return at least the 20 entries we created (there may be other
+ # entities lying around created by other tests/setup).
+ entities = self._list_entities(entity)()
+ self.assertTrue(len(entities) >= 20)
+
+ def test_list_users_filtered_and_limited(self):
+ self._test_list_entity_filtered_and_limited('user')
+
+ def test_list_groups_filtered_and_limited(self):
+ self._test_list_entity_filtered_and_limited('group')
+
+ def test_list_projects_filtered_and_limited(self):
+ self._test_list_entity_filtered_and_limited('project')
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
new file mode 100644
index 00000000..cc41d977
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
@@ -0,0 +1,247 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from testtools import matchers
+
+from keystone import exception
+
+
+class PolicyAssociationTests(object):
+
+ def _assert_correct_policy(self, endpoint, policy):
+ ref = (
+ self.endpoint_policy_api.get_policy_for_endpoint(endpoint['id']))
+ self.assertEqual(policy['id'], ref['id'])
+
+ def _assert_correct_endpoints(self, policy, endpoint_list):
+ endpoint_id_list = [ep['id'] for ep in endpoint_list]
+ endpoints = (
+ self.endpoint_policy_api.list_endpoints_for_policy(policy['id']))
+ self.assertThat(endpoints, matchers.HasLength(len(endpoint_list)))
+ for endpoint in endpoints:
+ self.assertIn(endpoint['id'], endpoint_id_list)
+
+ def load_sample_data(self):
+ """Create sample data to test policy associations.
+
+ The following data is created:
+
+ - 3 regions, in a hierarchy, 0 -> 1 -> 2 (where 0 is top)
+ - 3 services
+ - 6 endpoints, 2 in each region, with a mixture of services:
+ 0 - region 0, Service 0
+ 1 - region 0, Service 1
+ 2 - region 1, Service 1
+ 3 - region 1, Service 2
+ 4 - region 2, Service 2
+ 5 - region 2, Service 0
+
+ """
+
+ def new_endpoint(region_id, service_id):
+ endpoint = {'id': uuid.uuid4().hex, 'interface': 'test',
+ 'region_id': region_id, 'service_id': service_id,
+ 'url': '/url'}
+ self.endpoint.append(self.catalog_api.create_endpoint(
+ endpoint['id'], endpoint))
+
+ self.policy = []
+ self.endpoint = []
+ self.service = []
+ self.region = []
+ for i in range(3):
+ policy = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex,
+ 'blob': {'data': uuid.uuid4().hex}}
+ self.policy.append(self.policy_api.create_policy(policy['id'],
+ policy))
+ service = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex}
+ self.service.append(self.catalog_api.create_service(service['id'],
+ service))
+ region = {'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex}
+ # Link the 3 regions together as a hierarchy, [0] at the top
+ if i != 0:
+ region['parent_region_id'] = self.region[i - 1]['id']
+ self.region.append(self.catalog_api.create_region(region))
+
+ new_endpoint(self.region[0]['id'], self.service[0]['id'])
+ new_endpoint(self.region[0]['id'], self.service[1]['id'])
+ new_endpoint(self.region[1]['id'], self.service[1]['id'])
+ new_endpoint(self.region[1]['id'], self.service[2]['id'])
+ new_endpoint(self.region[2]['id'], self.service[2]['id'])
+ new_endpoint(self.region[2]['id'], self.service[0]['id'])
+
+ def test_policy_to_endpoint_association_crud(self):
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
+ self.endpoint_policy_api.check_policy_association(
+ self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
+ self.endpoint_policy_api.delete_policy_association(
+ self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ endpoint_id=self.endpoint[0]['id'])
+
+ def test_overwriting_policy_to_endpoint_association(self):
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[1]['id'], endpoint_id=self.endpoint[0]['id'])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ endpoint_id=self.endpoint[0]['id'])
+ self.endpoint_policy_api.check_policy_association(
+ self.policy[1]['id'], endpoint_id=self.endpoint[0]['id'])
+
+ def test_invalid_policy_to_endpoint_association(self):
+ self.assertRaises(exception.InvalidPolicyAssociation,
+ self.endpoint_policy_api.create_policy_association,
+ self.policy[0]['id'])
+ self.assertRaises(exception.InvalidPolicyAssociation,
+ self.endpoint_policy_api.create_policy_association,
+ self.policy[0]['id'],
+ endpoint_id=self.endpoint[0]['id'],
+ region_id=self.region[0]['id'])
+ self.assertRaises(exception.InvalidPolicyAssociation,
+ self.endpoint_policy_api.create_policy_association,
+ self.policy[0]['id'],
+ endpoint_id=self.endpoint[0]['id'],
+ service_id=self.service[0]['id'])
+ self.assertRaises(exception.InvalidPolicyAssociation,
+ self.endpoint_policy_api.create_policy_association,
+ self.policy[0]['id'],
+ region_id=self.region[0]['id'])
+
+ def test_policy_to_explicit_endpoint_association(self):
+ # Associate policy 0 with endpoint 0
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
+ self._assert_correct_policy(self.endpoint[0], self.policy[0])
+ self._assert_correct_endpoints(self.policy[0], [self.endpoint[0]])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.get_policy_for_endpoint,
+ uuid.uuid4().hex)
+
+ def test_policy_to_service_association(self):
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], service_id=self.service[0]['id'])
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[1]['id'], service_id=self.service[1]['id'])
+
+ # Endpoints 0 and 5 are part of service 0
+ self._assert_correct_policy(self.endpoint[0], self.policy[0])
+ self._assert_correct_policy(self.endpoint[5], self.policy[0])
+ self._assert_correct_endpoints(
+ self.policy[0], [self.endpoint[0], self.endpoint[5]])
+
+ # Endpoints 1 and 2 are part of service 1
+ self._assert_correct_policy(self.endpoint[1], self.policy[1])
+ self._assert_correct_policy(self.endpoint[2], self.policy[1])
+ self._assert_correct_endpoints(
+ self.policy[1], [self.endpoint[1], self.endpoint[2]])
+
+ def test_policy_to_region_and_service_association(self):
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], service_id=self.service[0]['id'],
+ region_id=self.region[0]['id'])
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[1]['id'], service_id=self.service[1]['id'],
+ region_id=self.region[1]['id'])
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[2]['id'], service_id=self.service[2]['id'],
+ region_id=self.region[2]['id'])
+
+ # Endpoint 0 is in region 0 with service 0, so should get policy 0
+ self._assert_correct_policy(self.endpoint[0], self.policy[0])
+ # Endpoint 5 is in region 2 with service 0, so should also get
+ # policy 0 by searching up the tree to region 0
+ self._assert_correct_policy(self.endpoint[5], self.policy[0])
+
+ # Looking the other way round, policy 2 should only be in use by
+ # endpoint 4, since that's the only endpoint in region 2 with the
+ # correct service
+ self._assert_correct_endpoints(
+ self.policy[2], [self.endpoint[4]])
+ # Policy 1 should only be in use by endpoint 2, since that's the only
+ # endpoint in region 1 (and region 2 below it) with the correct service
+ self._assert_correct_endpoints(
+ self.policy[1], [self.endpoint[2]])
+ # Policy 0 should be in use by endpoint 0, as well as 5 (since 5 is
+ # of the correct service and in region 2 below it)
+ self._assert_correct_endpoints(
+ self.policy[0], [self.endpoint[0], self.endpoint[5]])
+
+ def test_delete_association_by_entity(self):
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
+ self.endpoint_policy_api.delete_association_by_endpoint(
+ self.endpoint[0]['id'])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ endpoint_id=self.endpoint[0]['id'])
+ # Make sure deleting it again is silent - since this method is used
+ # in response to notifications by the controller.
+ self.endpoint_policy_api.delete_association_by_endpoint(
+ self.endpoint[0]['id'])
+
+ # Now try with service - ensure both combined region & service
+ # associations and explicit service ones are removed
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], service_id=self.service[0]['id'],
+ region_id=self.region[0]['id'])
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[1]['id'], service_id=self.service[0]['id'],
+ region_id=self.region[1]['id'])
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], service_id=self.service[0]['id'])
+
+ self.endpoint_policy_api.delete_association_by_service(
+ self.service[0]['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ service_id=self.service[0]['id'],
+ region_id=self.region[0]['id'])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[1]['id'],
+ service_id=self.service[0]['id'],
+ region_id=self.region[1]['id'])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ service_id=self.service[0]['id'])
+
+ # Finally, check delete by region
+ self.endpoint_policy_api.create_policy_association(
+ self.policy[0]['id'], service_id=self.service[0]['id'],
+ region_id=self.region[0]['id'])
+
+ self.endpoint_policy_api.delete_association_by_region(
+ self.region[0]['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ service_id=self.service[0]['id'],
+ region_id=self.region[0]['id'])
+ self.assertRaises(exception.NotFound,
+ self.endpoint_policy_api.check_policy_association,
+ self.policy[0]['id'],
+ service_id=self.service[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
new file mode 100644
index 00000000..dab02859
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
@@ -0,0 +1,37 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import sql
+from keystone.tests.unit import test_backend_endpoint_policy
+from keystone.tests.unit import test_backend_sql
+
+
+class SqlPolicyAssociationTable(test_backend_sql.SqlModels):
+ """Set of tests for checking SQL Policy Association Mapping."""
+
+ def test_policy_association_mapping(self):
+ cols = (('policy_id', sql.String, 64),
+ ('endpoint_id', sql.String, 64),
+ ('service_id', sql.String, 64),
+ ('region_id', sql.String, 64))
+ self.assertExpectedSchema('policy_association', cols)
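+        # NOTE: each cols entry is (column name, SQL type, max length);
+        # the length is None for types such as Boolean, Text or JsonBlob
+        # that do not carry one.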
+
+
+class SqlPolicyAssociationTests(
+ test_backend_sql.SqlTests,
+ test_backend_endpoint_policy.PolicyAssociationTests):
+
+ def load_fixtures(self, fixtures):
+ super(SqlPolicyAssociationTests, self).load_fixtures(fixtures)
+ self.load_sample_data()
diff --git a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py b/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
new file mode 100644
index 00000000..48ebad6c
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
@@ -0,0 +1,46 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import sql
+from keystone.tests.unit import test_backend_sql
+
+
+class SqlFederation(test_backend_sql.SqlModels):
+ """Set of tests for checking SQL Federation."""
+
+ def test_identity_provider(self):
+ cols = (('id', sql.String, 64),
+ ('remote_id', sql.String, 256),
+ ('enabled', sql.Boolean, None),
+ ('description', sql.Text, None))
+ self.assertExpectedSchema('identity_provider', cols)
+
+ def test_federated_protocol(self):
+ cols = (('id', sql.String, 64),
+ ('idp_id', sql.String, 64),
+ ('mapping_id', sql.String, 64))
+ self.assertExpectedSchema('federation_protocol', cols)
+
+ def test_mapping(self):
+ cols = (('id', sql.String, 64),
+ ('rules', sql.JsonBlob, None))
+ self.assertExpectedSchema('mapping', cols)
+
+ def test_service_provider(self):
+ cols = (('auth_url', sql.String, 256),
+ ('id', sql.String, 64),
+ ('enabled', sql.Boolean, None),
+ ('description', sql.Text, None),
+ ('sp_url', sql.String, 256))
+ self.assertExpectedSchema('service_provider', cols)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py b/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
new file mode 100644
index 00000000..6b691e5a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from testtools import matchers
+
+from keystone.common import sql
+from keystone.identity.mapping_backends import mapping
+from keystone.tests.unit import identity_mapping as mapping_sql
+from keystone.tests.unit import test_backend_sql
+
+
+class SqlIDMappingTable(test_backend_sql.SqlModels):
+ """Set of tests for checking SQL Identity ID Mapping."""
+
+ def test_id_mapping(self):
+ cols = (('public_id', sql.String, 64),
+ ('domain_id', sql.String, 64),
+ ('local_id', sql.String, 64),
+ ('entity_type', sql.Enum, None))
+ self.assertExpectedSchema('id_mapping', cols)
+
+
+class SqlIDMapping(test_backend_sql.SqlTests):
+
+ def setUp(self):
+ super(SqlIDMapping, self).setUp()
+ self.load_sample_data()
+
+ def load_sample_data(self):
+ self.addCleanup(self.clean_sample_data)
+ domainA = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.domainA = self.resource_api.create_domain(domainA['id'], domainA)
+ domainB = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.domainB = self.resource_api.create_domain(domainB['id'], domainB)
+
+ def clean_sample_data(self):
+ if hasattr(self, 'domainA'):
+ self.domainA['enabled'] = False
+ self.resource_api.update_domain(self.domainA['id'], self.domainA)
+ self.resource_api.delete_domain(self.domainA['id'])
+ if hasattr(self, 'domainB'):
+ self.domainB['enabled'] = False
+ self.resource_api.update_domain(self.domainB['id'], self.domainB)
+ self.resource_api.delete_domain(self.domainB['id'])
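+        # NOTE: a domain must be disabled before it can be deleted, which
+        # is why each domain is updated with enabled=False prior to the
+        # delete_domain() call above.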
+
+ def test_invalid_public_key(self):
+ self.assertIsNone(self.id_mapping_api.get_id_mapping(uuid.uuid4().hex))
+
+ def test_id_mapping_crud(self):
+ initial_mappings = len(mapping_sql.list_id_mappings())
+ local_id1 = uuid.uuid4().hex
+ local_id2 = uuid.uuid4().hex
+ local_entity1 = {'domain_id': self.domainA['id'],
+ 'local_id': local_id1,
+ 'entity_type': mapping.EntityType.USER}
+ local_entity2 = {'domain_id': self.domainB['id'],
+ 'local_id': local_id2,
+ 'entity_type': mapping.EntityType.GROUP}
+
+ # Check no mappings for the new local entities
+ self.assertIsNone(self.id_mapping_api.get_public_id(local_entity1))
+ self.assertIsNone(self.id_mapping_api.get_public_id(local_entity2))
+
+ # Create the new mappings and then read them back
+ public_id1 = self.id_mapping_api.create_id_mapping(local_entity1)
+ public_id2 = self.id_mapping_api.create_id_mapping(local_entity2)
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 2))
+ self.assertEqual(
+ public_id1, self.id_mapping_api.get_public_id(local_entity1))
+ self.assertEqual(
+ public_id2, self.id_mapping_api.get_public_id(local_entity2))
+
+ local_id_ref = self.id_mapping_api.get_id_mapping(public_id1)
+ self.assertEqual(self.domainA['id'], local_id_ref['domain_id'])
+ self.assertEqual(local_id1, local_id_ref['local_id'])
+ self.assertEqual(mapping.EntityType.USER, local_id_ref['entity_type'])
+ # Check we have really created a new external ID
+ self.assertNotEqual(local_id1, public_id1)
+
+ local_id_ref = self.id_mapping_api.get_id_mapping(public_id2)
+ self.assertEqual(self.domainB['id'], local_id_ref['domain_id'])
+ self.assertEqual(local_id2, local_id_ref['local_id'])
+ self.assertEqual(mapping.EntityType.GROUP, local_id_ref['entity_type'])
+ # Check we have really created a new external ID
+ self.assertNotEqual(local_id2, public_id2)
+
+        # Create another mapping, this time specifying a public ID to use
+ new_public_id = uuid.uuid4().hex
+ public_id3 = self.id_mapping_api.create_id_mapping(
+ {'domain_id': self.domainB['id'], 'local_id': local_id2,
+ 'entity_type': mapping.EntityType.USER},
+ public_id=new_public_id)
+ self.assertEqual(new_public_id, public_id3)
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 3))
+
+ # Delete the mappings we created, and make sure the mapping count
+ # goes back to where it was
+ self.id_mapping_api.delete_id_mapping(public_id1)
+ self.id_mapping_api.delete_id_mapping(public_id2)
+ self.id_mapping_api.delete_id_mapping(public_id3)
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings))
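+        # NOTE: each mapping ties a local entity, identified by the triple
+        # (domain_id, local_id, entity_type), to a single public ID that
+        # is generated on create unless one is supplied explicitly via the
+        # public_id argument.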
+
+ def test_id_mapping_handles_unicode(self):
+ initial_mappings = len(mapping_sql.list_id_mappings())
+ local_id = u'fäké1'
+ local_entity = {'domain_id': self.domainA['id'],
+ 'local_id': local_id,
+ 'entity_type': mapping.EntityType.USER}
+
+ # Check no mappings for the new local entity
+ self.assertIsNone(self.id_mapping_api.get_public_id(local_entity))
+
+ # Create the new mapping and then read it back
+ public_id = self.id_mapping_api.create_id_mapping(local_entity)
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 1))
+ self.assertEqual(
+ public_id, self.id_mapping_api.get_public_id(local_entity))
+
+ def test_delete_public_id_is_silent(self):
+        # Test that deleting an invalid public ID is silent
+ self.id_mapping_api.delete_id_mapping(uuid.uuid4().hex)
+
+ def test_purge_mappings(self):
+ initial_mappings = len(mapping_sql.list_id_mappings())
+ local_id1 = uuid.uuid4().hex
+ local_id2 = uuid.uuid4().hex
+ local_id3 = uuid.uuid4().hex
+ local_id4 = uuid.uuid4().hex
+ local_id5 = uuid.uuid4().hex
+
+        # Create five mappings, two in domainA and three in domainB
+ self.id_mapping_api.create_id_mapping(
+ {'domain_id': self.domainA['id'], 'local_id': local_id1,
+ 'entity_type': mapping.EntityType.USER})
+ self.id_mapping_api.create_id_mapping(
+ {'domain_id': self.domainA['id'], 'local_id': local_id2,
+ 'entity_type': mapping.EntityType.USER})
+ public_id3 = self.id_mapping_api.create_id_mapping(
+ {'domain_id': self.domainB['id'], 'local_id': local_id3,
+ 'entity_type': mapping.EntityType.GROUP})
+ public_id4 = self.id_mapping_api.create_id_mapping(
+ {'domain_id': self.domainB['id'], 'local_id': local_id4,
+ 'entity_type': mapping.EntityType.USER})
+ public_id5 = self.id_mapping_api.create_id_mapping(
+ {'domain_id': self.domainB['id'], 'local_id': local_id5,
+ 'entity_type': mapping.EntityType.USER})
+
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 5))
+
+ # Purge mappings for domainA, should be left with those in B
+ self.id_mapping_api.purge_mappings(
+ {'domain_id': self.domainA['id']})
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 3))
+ self.id_mapping_api.get_id_mapping(public_id3)
+ self.id_mapping_api.get_id_mapping(public_id4)
+ self.id_mapping_api.get_id_mapping(public_id5)
+
+ # Purge mappings for type Group, should purge one more
+ self.id_mapping_api.purge_mappings(
+ {'entity_type': mapping.EntityType.GROUP})
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 2))
+ self.id_mapping_api.get_id_mapping(public_id4)
+ self.id_mapping_api.get_id_mapping(public_id5)
+
+ # Purge mapping for a specific local identifier
+ self.id_mapping_api.purge_mappings(
+ {'domain_id': self.domainB['id'], 'local_id': local_id4,
+ 'entity_type': mapping.EntityType.USER})
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings + 1))
+ self.id_mapping_api.get_id_mapping(public_id5)
+
+        # Purge the remaining mappings
+ self.id_mapping_api.purge_mappings({})
+ self.assertThat(mapping_sql.list_id_mappings(),
+ matchers.HasLength(initial_mappings))
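+        # NOTE: purge_mappings() takes a filter dict: any combination of
+        # domain_id, local_id and entity_type may be supplied, the given
+        # criteria are ANDed together, and an empty dict matches (and so
+        # purges) every remaining mapping.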
diff --git a/keystone-moon/keystone/tests/unit/test_backend_kvs.py b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
new file mode 100644
index 00000000..c0997ad9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
@@ -0,0 +1,172 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import uuid
+
+from oslo_config import cfg
+from oslo_utils import timeutils
+import six
+
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_backend
+
+
+CONF = cfg.CONF
+
+
+class KvsToken(tests.TestCase, test_backend.TokenTests):
+ def setUp(self):
+ super(KvsToken, self).setUp()
+ self.load_backends()
+
+ def test_flush_expired_token(self):
+ self.assertRaises(
+ exception.NotImplemented,
+ self.token_provider_api._persistence.flush_expired_tokens)
+
+ def _update_user_token_index_direct(self, user_key, token_id, new_data):
+ persistence = self.token_provider_api._persistence
+ token_list = persistence.driver._get_user_token_list_with_expiry(
+ user_key)
+        # Update the user-index so that the expires time is _actually_
+        # expired. Since we do not do an explicit get on the token, we only
+        # reference the data in the user index (to save extra round-trips to
+        # the kvs backend).
+ for i, data in enumerate(token_list):
+ if data[0] == token_id:
+ token_list[i] = new_data
+ break
+ self.token_provider_api._persistence.driver._store.set(user_key,
+ token_list)
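+        # NOTE: the KVS driver keeps a per-user index of issued tokens as
+        # a list of (token_id, expires_isotime) tuples so that expiry can
+        # be checked without fetching each token; the helper above
+        # rewrites one entry of that list in place to simulate expiry.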
+
+ def test_cleanup_user_index_on_create(self):
+ user_id = six.text_type(uuid.uuid4().hex)
+ valid_token_id, data = self.create_token_sample_data(user_id=user_id)
+ expired_token_id, expired_data = self.create_token_sample_data(
+ user_id=user_id)
+
+ expire_delta = datetime.timedelta(seconds=86400)
+
+ # NOTE(morganfainberg): Directly access the data cache since we need to
+ # get expired tokens as well as valid tokens.
+ token_persistence = self.token_provider_api._persistence
+ user_key = token_persistence.driver._prefix_user_id(user_id)
+ user_token_list = token_persistence.driver._store.get(user_key)
+ valid_token_ref = token_persistence.get_token(valid_token_id)
+ expired_token_ref = token_persistence.get_token(expired_token_id)
+ expected_user_token_list = [
+ (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (expired_token_id, timeutils.isotime(expired_token_ref['expires'],
+ subsecond=True))]
+ self.assertEqual(expected_user_token_list, user_token_list)
+ new_expired_data = (expired_token_id,
+ timeutils.isotime(
+ (timeutils.utcnow() - expire_delta),
+ subsecond=True))
+ self._update_user_token_index_direct(user_key, expired_token_id,
+ new_expired_data)
+ valid_token_id_2, valid_data_2 = self.create_token_sample_data(
+ user_id=user_id)
+ valid_token_ref_2 = token_persistence.get_token(valid_token_id_2)
+ expected_user_token_list = [
+ (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (valid_token_id_2, timeutils.isotime(valid_token_ref_2['expires'],
+ subsecond=True))]
+ user_token_list = token_persistence.driver._store.get(user_key)
+ self.assertEqual(expected_user_token_list, user_token_list)
+
+ # Test that revoked tokens are removed from the list on create.
+ token_persistence.delete_token(valid_token_id_2)
+ new_token_id, data = self.create_token_sample_data(user_id=user_id)
+ new_token_ref = token_persistence.get_token(new_token_id)
+ expected_user_token_list = [
+ (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (new_token_id, timeutils.isotime(new_token_ref['expires'],
+ subsecond=True))]
+ user_token_list = token_persistence.driver._store.get(user_key)
+ self.assertEqual(expected_user_token_list, user_token_list)
+
+
+class KvsCatalog(tests.TestCase, test_backend.CatalogTests):
+ def setUp(self):
+ super(KvsCatalog, self).setUp()
+ self.load_backends()
+ self._load_fake_catalog()
+
+ def config_overrides(self):
+ super(KvsCatalog, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.kvs.Catalog')
+
+ def _load_fake_catalog(self):
+ self.catalog_foobar = self.catalog_api.driver._create_catalog(
+ 'foo', 'bar',
+ {'RegionFoo': {'service_bar': {'foo': 'bar'}}})
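+        # NOTE: the KVS catalog is stored per (user, tenant) as a nested
+        # dict of the form {region: {service: metadata}}; the fixture
+        # above creates such a catalog for user 'foo' in tenant 'bar'.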
+
+ def test_get_catalog_404(self):
+ # FIXME(dolph): this test should be moved up to test_backend
+ # FIXME(dolph): exceptions should be UserNotFound and ProjectNotFound
+ self.assertRaises(exception.NotFound,
+ self.catalog_api.get_catalog,
+ uuid.uuid4().hex,
+ 'bar')
+
+ self.assertRaises(exception.NotFound,
+ self.catalog_api.get_catalog,
+ 'foo',
+ uuid.uuid4().hex)
+
+ def test_get_catalog(self):
+ catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
+ self.assertDictEqual(catalog_ref, self.catalog_foobar)
+
+ def test_get_catalog_endpoint_disabled(self):
+ # This test doesn't apply to KVS because with the KVS backend the
+ # application creates the catalog (including the endpoints) for each
+ # user and project. Whether endpoints are enabled or disabled isn't
+ # a consideration.
+ f = super(KvsCatalog, self).test_get_catalog_endpoint_disabled
+ self.assertRaises(exception.NotFound, f)
+
+ def test_get_v3_catalog_endpoint_disabled(self):
+ # There's no need to have disabled endpoints in the kvs catalog. Those
+ # endpoints should just be removed from the store. This just tests
+ # what happens currently when the super impl is called.
+ f = super(KvsCatalog, self).test_get_v3_catalog_endpoint_disabled
+ self.assertRaises(exception.NotFound, f)
+
+ def test_list_regions_filtered_by_parent_region_id(self):
+ self.skipTest('KVS backend does not support hints')
+
+ def test_service_filtering(self):
+ self.skipTest("kvs backend doesn't support filtering")
+
+
+class KvsTokenCacheInvalidation(tests.TestCase,
+ test_backend.TokenCacheInvalidation):
+ def setUp(self):
+ super(KvsTokenCacheInvalidation, self).setUp()
+ self.load_backends()
+ self._create_test_data()
+
+ def config_overrides(self):
+ super(KvsTokenCacheInvalidation, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ driver='keystone.token.persistence.backends.kvs.Token')
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap.py b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
new file mode 100644
index 00000000..10119808
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
@@ -0,0 +1,3049 @@
+# -*- coding: utf-8 -*-
+# Copyright 2012 OpenStack Foundation
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+import ldap
+import mock
+from oslo_config import cfg
+from testtools import matchers
+
+from keystone.common import cache
+from keystone.common import ldap as common_ldap
+from keystone.common.ldap import core as common_ldap_core
+from keystone.common import sql
+from keystone import exception
+from keystone import identity
+from keystone.identity.mapping_backends import mapping as map
+from keystone import resource
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import fakeldap
+from keystone.tests.unit import identity_mapping as mapping_sql
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit import test_backend
+
+
+CONF = cfg.CONF
+
+
+def create_group_container(identity_api):
+ # Create the groups base entry (ou=Groups,cn=example,cn=com)
+ group_api = identity_api.driver.group
+ conn = group_api.get_connection()
+ dn = 'ou=Groups,cn=example,cn=com'
+ conn.add_s(dn, [('objectclass', ['organizationalUnit']),
+ ('ou', ['Groups'])])
+
+
+class BaseLDAPIdentity(test_backend.IdentityTests):
+
+ def setUp(self):
+ super(BaseLDAPIdentity, self).setUp()
+ self.clear_database()
+
+ common_ldap.register_handler('fake://', fakeldap.FakeLdap)
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.addCleanup(common_ldap_core._HANDLERS.clear)
+
+ def _get_domain_fixture(self):
+ """Domains in LDAP are read-only, so just return the static one."""
+ return self.resource_api.get_domain(CONF.identity.default_domain_id)
+
+ def clear_database(self):
+ for shelf in fakeldap.FakeShelves:
+ fakeldap.FakeShelves[shelf].clear()
+
+ def reload_backends(self, domain_id):
+ # Only one backend unless we are using separate domain backends
+ self.load_backends()
+
+ def get_config(self, domain_id):
+ # Only one conf structure unless we are using separate domain backends
+ return CONF
+
+ def config_overrides(self):
+ super(BaseLDAPIdentity, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def config_files(self):
+ config_files = super(BaseLDAPIdentity, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ def get_user_enabled_vals(self, user):
+ user_dn = (
+ self.identity_api.driver.user._id_to_dn_string(user['id']))
+ enabled_attr_name = CONF.ldap.user_enabled_attribute
+
+ ldap_ = self.identity_api.driver.user.get_connection()
+ res = ldap_.search_s(user_dn,
+ ldap.SCOPE_BASE,
+ u'(sn=%s)' % user['name'])
+ if enabled_attr_name in res[0][1]:
+ return res[0][1][enabled_attr_name]
+ else:
+ return None
+
+ def test_build_tree(self):
+ """Regression test for building the tree names
+ """
+ user_api = identity.backends.ldap.UserApi(CONF)
+ self.assertTrue(user_api)
+ self.assertEqual("ou=Users,%s" % CONF.ldap.suffix, user_api.tree_dn)
+
+ def test_configurable_allowed_user_actions(self):
+ user = {'name': u'fäké1',
+ 'password': u'fäképass1',
+ 'domain_id': CONF.identity.default_domain_id,
+ 'tenants': ['bar']}
+ user = self.identity_api.create_user(user)
+ self.identity_api.get_user(user['id'])
+
+ user['password'] = u'fäképass2'
+ self.identity_api.update_user(user['id'], user)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+
+ def test_configurable_forbidden_user_actions(self):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_allow_create = False
+ conf.ldap.user_allow_update = False
+ conf.ldap.user_allow_delete = False
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ user = {'name': u'fäké1',
+ 'password': u'fäképass1',
+ 'domain_id': CONF.identity.default_domain_id,
+ 'tenants': ['bar']}
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.create_user,
+ user)
+
+ self.user_foo['password'] = u'fäképass2'
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.update_user,
+ self.user_foo['id'],
+ self.user_foo)
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.delete_user,
+ self.user_foo['id'])
+
+ def test_configurable_forbidden_create_existing_user(self):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_allow_create = False
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.create_user,
+ self.user_foo)
+
+ def test_user_filter(self):
+ user_ref = self.identity_api.get_user(self.user_foo['id'])
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ conf = self.get_config(user_ref['domain_id'])
+ conf.ldap.user_filter = '(CN=DOES_NOT_MATCH)'
+ self.reload_backends(user_ref['domain_id'])
+ # invalidate the cache if the result is cached.
+ self.identity_api.get_user.invalidate(self.identity_api,
+ self.user_foo['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ self.user_foo['id'])
+
+ def test_remove_role_grant_from_user_and_project(self):
+ self.assignment_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_project(self):
+ new_domain = self._get_domain_fixture()
+ new_group = {'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'enabled': True,
+ 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEqual([], roles_ref)
+ self.assertEqual(0, len(roles_ref))
+
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertNotEmpty(roles_ref)
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.RoleAssignmentNotFound,
+ self.assignment_api.delete_grant,
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_role_assignment_by_domain_not_found(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_del_role_assignment_by_domain_not_found(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_and_remove_role_grant_by_user_and_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_and_remove_correct_role_grant_from_a_mix(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_role_grant_by_group_and_cross_domain_project(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_role_grant_by_user_and_cross_domain_project(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_multi_role_grant_by_user_group_on_project_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_delete_role_with_user_and_group_grants(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_delete_user_with_group_project_domain_links(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_delete_group_with_user_project_domain_links(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_list_projects_for_user(self):
+ domain = self._get_domain_fixture()
+ user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertThat(user_projects, matchers.HasLength(0))
+
+ # new grant(user1, role_member, tenant_bar)
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ # new grant(user1, role_member, tenant_baz)
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_baz['id'],
+ role_id=self.role_member['id'])
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertThat(user_projects, matchers.HasLength(2))
+
+ # Now, check number of projects through groups
+ user2 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user2 = self.identity_api.create_user(user2)
+
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = self.identity_api.create_group(group1)
+
+ self.identity_api.add_user_to_group(user2['id'], group1['id'])
+
+ # new grant(group1(user2), role_member, tenant_bar)
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ # new grant(group1(user2), role_member, tenant_baz)
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=self.tenant_baz['id'],
+ role_id=self.role_member['id'])
+ user_projects = self.assignment_api.list_projects_for_user(user2['id'])
+ self.assertThat(user_projects, matchers.HasLength(2))
+
+ # new grant(group1(user2), role_other, tenant_bar)
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_other['id'])
+ user_projects = self.assignment_api.list_projects_for_user(user2['id'])
+ self.assertThat(user_projects, matchers.HasLength(2))
+
+ def test_list_projects_for_user_and_groups(self):
+ domain = self._get_domain_fixture()
+ # Create user1
+ user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+
+ # Create new group for user1
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = self.identity_api.create_group(group1)
+
+ # Add user1 to group1
+ self.identity_api.add_user_to_group(user1['id'], group1['id'])
+
+ # Now, add grant to user1 and group1 in tenant_bar
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+
+        # The result is that user1 has only one project granted
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertThat(user_projects, matchers.HasLength(1))
+
+        # Now, delete user1's grant on tenant_bar and check
+ self.assignment_api.delete_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+
+        # The result is that user1 still has one project granted,
+        # through group1.
+ user_projects = self.assignment_api.list_projects_for_user(user1['id'])
+ self.assertThat(user_projects, matchers.HasLength(1))
+
+ def test_list_projects_for_user_with_grants(self):
+ domain = self._get_domain_fixture()
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = self.identity_api.create_group(group1)
+ group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group2 = self.identity_api.create_group(group2)
+
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project2['id'], project2)
+
+ self.identity_api.add_user_to_group(new_user['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(new_user['id'],
+ group2['id'])
+
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ project_id=project1['id'],
+ role_id=self.role_admin['id'])
+ self.assignment_api.create_grant(group_id=group2['id'],
+ project_id=project2['id'],
+ role_id=self.role_admin['id'])
+
+ user_projects = self.assignment_api.list_projects_for_user(
+ new_user['id'])
+ self.assertEqual(3, len(user_projects))
+
+ def test_create_duplicate_user_name_in_different_domains(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_create_duplicate_project_name_in_different_domains(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_create_duplicate_group_name_in_different_domains(self):
+ self.skipTest(
+ 'N/A: LDAP does not support multiple domains')
+
+ def test_move_user_between_domains(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_move_user_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_move_group_between_domains(self):
+ self.skipTest(
+ 'N/A: LDAP does not support multiple domains')
+
+ def test_move_group_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_move_project_between_domains(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_move_project_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_get_roles_for_user_and_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_roles_for_groups_on_domain(self):
+ self.skipTest('Blocked by bug: 1390125')
+
+ def test_get_roles_for_groups_on_project(self):
+ self.skipTest('Blocked by bug: 1390125')
+
+ def test_list_domains_for_groups(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_list_projects_for_groups(self):
+ self.skipTest('Blocked by bug: 1390125')
+
+ def test_domain_delete_hierarchy(self):
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_list_role_assignments_unfiltered(self):
+ new_domain = self._get_domain_fixture()
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain['id']}
+ self.resource_api.create_project(new_project['id'], new_project)
+
+        # First check how many role grants already exist
+ existing_assignments = len(self.assignment_api.list_role_assignments())
+
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ project_id=new_project['id'],
+ role_id='other')
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ project_id=new_project['id'],
+ role_id='admin')
+
+        # Read back the list of assignments - check it has gone up by 2
+ after_assignments = len(self.assignment_api.list_role_assignments())
+ self.assertEqual(existing_assignments + 2, after_assignments)
+
+ def test_list_role_assignments_dumb_member(self):
+ self.config_fixture.config(group='ldap', use_dumb_member=True)
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ new_domain = self._get_domain_fixture()
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ new_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain['id']}
+ self.resource_api.create_project(new_project['id'], new_project)
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ project_id=new_project['id'],
+ role_id='other')
+
+ # Read back the list of assignments and ensure
+ # that the LDAP dumb member isn't listed.
+ assignment_ids = [a['user_id'] for a in
+ self.assignment_api.list_role_assignments()]
+ dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
+ self.assertNotIn(dumb_id, assignment_ids)
+
+ def test_list_user_ids_for_project_dumb_member(self):
+ self.config_fixture.config(group='ldap', use_dumb_member=True)
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': test_backend.DEFAULT_DOMAIN_ID}
+
+ user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ user['id'])
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_baz['id'])
+
+ self.assertIn(user['id'], user_ids)
+
+ dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
+ self.assertNotIn(dumb_id, user_ids)
+
+ def test_multi_group_grants_on_project_domain(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_list_group_members_missing_entry(self):
+ """List group members with deleted user.
+
+ If a group has a deleted entry for a member, the non-deleted members
+ are returned.
+
+ """
+
+ # Create a group
+ group = dict(name=uuid.uuid4().hex,
+ domain_id=CONF.identity.default_domain_id)
+ group_id = self.identity_api.create_group(group)['id']
+
+ # Create a couple of users and add them to the group.
+ user = dict(name=uuid.uuid4().hex,
+ domain_id=CONF.identity.default_domain_id)
+ user_1_id = self.identity_api.create_user(user)['id']
+
+ self.identity_api.add_user_to_group(user_1_id, group_id)
+
+ user = dict(name=uuid.uuid4().hex,
+ domain_id=CONF.identity.default_domain_id)
+ user_2_id = self.identity_api.create_user(user)['id']
+
+ self.identity_api.add_user_to_group(user_2_id, group_id)
+
+ # Delete user 2
+ # NOTE(blk-u): need to go directly to user interface to keep from
+ # updating the group.
+ unused, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(user_2_id))
+ driver.user.delete(entity_id)
+
+ # List group users and verify only user 1.
+ res = self.identity_api.list_users_in_group(group_id)
+
+ self.assertEqual(1, len(res), "Expected 1 entry (user_1)")
+ self.assertEqual(user_1_id, res[0]['id'], "Expected user 1 id")
+
+ def test_list_group_members_when_no_members(self):
+ # List group members when there is no member in the group.
+ # No exception should be raised.
+ group = {
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+
+ # If this doesn't raise, then the test is successful.
+ self.identity_api.list_users_in_group(group['id'])
+
+ def test_list_group_members_dumb_member(self):
+ self.config_fixture.config(group='ldap', use_dumb_member=True)
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ # Create a group
+ group = dict(name=uuid.uuid4().hex,
+ domain_id=CONF.identity.default_domain_id)
+ group_id = self.identity_api.create_group(group)['id']
+
+ # Create a user
+ user = dict(name=uuid.uuid4().hex,
+ domain_id=CONF.identity.default_domain_id)
+ user_id = self.identity_api.create_user(user)['id']
+
+ # Add user to the group
+ self.identity_api.add_user_to_group(user_id, group_id)
+
+ user_ids = self.identity_api.list_users_in_group(group_id)
+ dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
+
+ self.assertNotIn(dumb_id, user_ids)
+
+ def test_list_domains(self):
+ domains = self.resource_api.list_domains()
+ self.assertEqual(
+ [resource.calc_default_domain()],
+ domains)
+
+ def test_list_domains_non_default_domain_id(self):
+        # If the default_domain_id is changed, the ID of the default domain
+        # returned by list_domains is the new default_domain_id.
+
+ new_domain_id = uuid.uuid4().hex
+ self.config_fixture.config(group='identity',
+ default_domain_id=new_domain_id)
+
+ domains = self.resource_api.list_domains()
+
+ self.assertEqual(new_domain_id, domains[0]['id'])
+
+ def test_authenticate_requires_simple_bind(self):
+ user = {
+ 'name': 'NO_META',
+ 'domain_id': test_backend.DEFAULT_DOMAIN_ID,
+ 'password': 'no_meta2',
+ 'enabled': True,
+ }
+ user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ user['id'])
+ driver = self.identity_api._select_identity_driver(
+ user['domain_id'])
+ driver.user.LDAP_USER = None
+ driver.user.LDAP_PASSWORD = None
+
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password=None)
+
+    # (spzala) The group and domain CRUD tests below override the standard
+    # ones
+ # in test_backend.py so that we can exclude the update name test, since we
+ # do not yet support the update of either group or domain names with LDAP.
+ # In the tests below, the update is demonstrated by updating description.
+ # Refer to bug 1136403 for more detail.
+ def test_group_crud(self):
+ group = {
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictEqual(group_ref, group)
+ group['description'] = uuid.uuid4().hex
+ self.identity_api.update_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictEqual(group_ref, group)
+
+ self.identity_api.delete_group(group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ group['id'])
+
+ @tests.skip_if_cache_disabled('identity')
+ def test_cache_layer_group_crud(self):
+ group = {
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+ # cache the result
+ group_ref = self.identity_api.get_group(group['id'])
+ # delete the group bypassing identity api.
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(group['id']))
+ driver.delete_group(entity_id)
+
+ self.assertEqual(group_ref,
+ self.identity_api.get_group(group['id']))
+ self.identity_api.get_group.invalidate(self.identity_api, group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group, group['id'])
+
+ group = {
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex}
+ group = self.identity_api.create_group(group)
+ # cache the result
+ self.identity_api.get_group(group['id'])
+ group['description'] = uuid.uuid4().hex
+ group_ref = self.identity_api.update_group(group['id'], group)
+ self.assertDictContainsSubset(self.identity_api.get_group(group['id']),
+ group_ref)
+
+ def test_create_user_none_mapping(self):
+ # When create a user where an attribute maps to None, the entry is
+ # created without that attribute and it doesn't fail with a TypeError.
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_attribute_ignore = ['enabled', 'email',
+ 'tenants', 'tenantId']
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ user = {'name': u'fäké1',
+ 'password': u'fäképass1',
+ 'domain_id': CONF.identity.default_domain_id,
+ 'default_project_id': 'maps_to_none',
+ }
+
+ # If this doesn't raise, then the test is successful.
+ user = self.identity_api.create_user(user)
+
+ def test_create_user_with_boolean_string_names(self):
+        # Ensure that any attribute that is equal to the string 'TRUE'
+        # or 'FALSE' will not be converted to a boolean value; it
+        # should be returned as-is.
+        boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
+                           'TrUe', 'FaLse']
+ for name in boolean_strings:
+ user = {
+ 'name': name,
+ 'domain_id': CONF.identity.default_domain_id}
+ user_ref = self.identity_api.create_user(user)
+ user_info = self.identity_api.get_user(user_ref['id'])
+ self.assertEqual(name, user_info['name'])
+            # Delete the user to ensure that the Keystone uniqueness
+            # requirements combined with the case-insensitive nature of a
+            # typical LDAP schema do not cause subsequent names in
+            # boolean_strings to clash.
+ self.identity_api.delete_user(user_ref['id'])
+
+ def test_unignored_user_none_mapping(self):
+ # Ensure that an attribute that maps to None that is not explicitly
+ # ignored in configuration is implicitly ignored without triggering
+ # an error.
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_attribute_ignore = ['enabled', 'email',
+ 'tenants', 'tenantId']
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ user = {'name': u'fäké1',
+ 'password': u'fäképass1',
+ 'domain_id': CONF.identity.default_domain_id,
+ }
+
+ user_ref = self.identity_api.create_user(user)
+
+ # If this doesn't raise, then the test is successful.
+ self.identity_api.get_user(user_ref['id'])
+
+ def test_update_user_name(self):
+ """A user's name cannot be changed through the LDAP driver."""
+ self.assertRaises(exception.Conflict,
+ super(BaseLDAPIdentity, self).test_update_user_name)
+
+ def test_arbitrary_attributes_are_returned_from_get_user(self):
+ self.skipTest("Using arbitrary attributes doesn't work under LDAP")
+
+ def test_new_arbitrary_attributes_are_returned_from_update_user(self):
+ self.skipTest("Using arbitrary attributes doesn't work under LDAP")
+
+ def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
+ self.skipTest("Using arbitrary attributes doesn't work under LDAP")
+
+ def test_cache_layer_domain_crud(self):
+ # TODO(morganfainberg): This also needs to be removed when full LDAP
+ # implementation is submitted. No need to duplicate the above test,
+ # just skip this time.
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_user_id_comma(self):
+ """Even if the user has a , in their ID, groups can be listed."""
+
+ # Create a user with a , in their ID
+ # NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
+
+        # Since we want to fake up this special ID, we'll squirt this
+        # directly into the driver and bypass the manager layer.
+ user_id = u'Doe, John'
+ user = {
+ 'id': user_id,
+ 'name': self.getUniqueString(),
+ 'password': self.getUniqueString(),
+ 'domain_id': CONF.identity.default_domain_id,
+ }
+ user = self.identity_api.driver.create_user(user_id, user)
+
+ # Now we'll use the manager to discover it, which will create a
+ # Public ID for it.
+ ref_list = self.identity_api.list_users()
+ public_user_id = None
+ for ref in ref_list:
+ if ref['name'] == user['name']:
+ public_user_id = ref['id']
+ break
+
+ # Create a group
+ group_id = uuid.uuid4().hex
+ group = {
+ 'id': group_id,
+ 'name': self.getUniqueString(prefix='tuidc'),
+ 'description': self.getUniqueString(),
+ 'domain_id': CONF.identity.default_domain_id,
+ }
+ group = self.identity_api.driver.create_group(group_id, group)
+ # Now we'll use the manager to discover it, which will create a
+ # Public ID for it.
+ ref_list = self.identity_api.list_groups()
+ public_group_id = None
+ for ref in ref_list:
+ if ref['name'] == group['name']:
+ public_group_id = ref['id']
+ break
+
+ # Put the user in the group
+ self.identity_api.add_user_to_group(public_user_id, public_group_id)
+
+ # List groups for user.
+ ref_list = self.identity_api.list_groups_for_user(public_user_id)
+
+ group['id'] = public_group_id
+ self.assertThat(ref_list, matchers.Equals([group]))
+
+ def test_user_id_comma_grants(self):
+ """Even if the user has a , in their ID, can get user and group grants.
+ """
+
+ # Create a user with a , in their ID
+ # NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
+
+        # Since we want to fake up this special ID, we'll squirt this
+        # directly into the driver and bypass the manager layer.
+ user_id = u'Doe, John'
+ user = {
+ 'id': user_id,
+ 'name': self.getUniqueString(),
+ 'password': self.getUniqueString(),
+ 'domain_id': CONF.identity.default_domain_id,
+ }
+ self.identity_api.driver.create_user(user_id, user)
+
+ # Now we'll use the manager to discover it, which will create a
+ # Public ID for it.
+ ref_list = self.identity_api.list_users()
+ public_user_id = None
+ for ref in ref_list:
+ if ref['name'] == user['name']:
+ public_user_id = ref['id']
+ break
+
+ # Grant the user a role on a project.
+
+ role_id = 'member'
+ project_id = self.tenant_baz['id']
+
+ self.assignment_api.create_grant(role_id, user_id=public_user_id,
+ project_id=project_id)
+
+ role_ref = self.assignment_api.get_grant(role_id,
+ user_id=public_user_id,
+ project_id=project_id)
+
+ self.assertEqual(role_id, role_ref['id'])
+
+ def test_user_enabled_ignored_disable_error(self):
+ # When the server is configured so that the enabled attribute is
+ # ignored for users, users cannot be disabled.
+
+ self.config_fixture.config(group='ldap',
+ user_attribute_ignore=['enabled'])
+
+ # Need to re-load backends for the config change to take effect.
+ self.load_backends()
+
+ # Attempt to disable the user.
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.update_user, self.user_foo['id'],
+ {'enabled': False})
+
+ user_info = self.identity_api.get_user(self.user_foo['id'])
+
+ # If 'enabled' is ignored then 'enabled' isn't returned as part of the
+ # ref.
+ self.assertNotIn('enabled', user_info)
+
+ def test_group_enabled_ignored_disable_error(self):
+ # When the server is configured so that the enabled attribute is
+ # ignored for groups, groups cannot be disabled.
+
+ self.config_fixture.config(group='ldap',
+ group_attribute_ignore=['enabled'])
+
+ # Need to re-load backends for the config change to take effect.
+ self.load_backends()
+
+ # There's no group fixture so create a group.
+ new_domain = self._get_domain_fixture()
+ new_group = {'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+
+ # Attempt to disable the group.
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.update_group, new_group['id'],
+ {'enabled': False})
+
+ group_info = self.identity_api.get_group(new_group['id'])
+
+ # If 'enabled' is ignored then 'enabled' isn't returned as part of the
+ # ref.
+ self.assertNotIn('enabled', group_info)
+
+ def test_project_enabled_ignored_disable_error(self):
+ # When the server is configured so that the enabled attribute is
+ # ignored for projects, projects cannot be disabled.
+
+ self.config_fixture.config(group='ldap',
+ project_attribute_ignore=['enabled'])
+
+ # Need to re-load backends for the config change to take effect.
+ self.load_backends()
+
+ # Attempt to disable the project.
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.update_project,
+ self.tenant_baz['id'], {'enabled': False})
+
+ project_info = self.resource_api.get_project(self.tenant_baz['id'])
+
+ # Unlike other entities, if 'enabled' is ignored then 'enabled' is
+ # returned as part of the ref.
+ self.assertIs(True, project_info['enabled'])
+
+
+class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
+
+ def setUp(self):
+ # NOTE(dstanek): The database must be setup prior to calling the
+ # parent's setUp. The parent's setUp uses services (like
+ # credentials) that require a database.
+ self.useFixture(database.Database())
+ super(LDAPIdentity, self).setUp()
+
+ def load_fixtures(self, fixtures):
+        # Override the super impl since we need to create the group
+        # container.
+ create_group_container(self.identity_api)
+ super(LDAPIdentity, self).load_fixtures(fixtures)
+
+ def test_configurable_allowed_project_actions(self):
+ tenant = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True}
+ self.resource_api.create_project(u'fäké1', tenant)
+ tenant_ref = self.resource_api.get_project(u'fäké1')
+ self.assertEqual(u'fäké1', tenant_ref['id'])
+
+ tenant['enabled'] = False
+ self.resource_api.update_project(u'fäké1', tenant)
+
+ self.resource_api.delete_project(u'fäké1')
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ u'fäké1')
+
+ def test_configurable_subtree_delete(self):
+ self.config_fixture.config(group='ldap', allow_subtree_delete=True)
+ self.load_backends()
+
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id}
+ self.resource_api.create_project(project1['id'], project1)
+
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role1['id'], role1)
+
+ user1 = {'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user1['id'],
+ tenant_id=project1['id'],
+ role_id=role1['id'])
+
+ self.resource_api.delete_project(project1['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project1['id'])
+
+ self.resource_api.create_project(project1['id'], project1)
+
+        roles = self.assignment_api.get_roles_for_user_and_project(
+            user1['id'],
+            project1['id'])
+        self.assertEqual(0, len(roles))
+
+ def test_configurable_forbidden_project_actions(self):
+ self.config_fixture.config(
+ group='ldap', project_allow_create=False,
+ project_allow_update=False, project_allow_delete=False)
+ self.load_backends()
+
+ tenant = {'id': u'fäké1', 'name': u'fäké1'}
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.create_project,
+ u'fäké1',
+ tenant)
+
+ self.tenant_bar['enabled'] = False
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.update_project,
+ self.tenant_bar['id'],
+ self.tenant_bar)
+ self.assertRaises(exception.ForbiddenAction,
+ self.resource_api.delete_project,
+ self.tenant_bar['id'])
+
+ def test_project_filter(self):
+ tenant_ref = self.resource_api.get_project(self.tenant_bar['id'])
+ self.assertDictEqual(tenant_ref, self.tenant_bar)
+
+ self.config_fixture.config(group='ldap',
+ project_filter='(CN=DOES_NOT_MATCH)')
+ self.load_backends()
+ # NOTE(morganfainberg): CONF.ldap.project_filter will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+        # could affect what the drivers would return up to the manager. This
+        # keeps that assumption valid when working with aggressive
+        # (on-create) cache population.
+ self.role_api.get_role.invalidate(self.role_api,
+ self.role_member['id'])
+ self.role_api.get_role(self.role_member['id'])
+ self.resource_api.get_project.invalidate(self.resource_api,
+ self.tenant_bar['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ self.tenant_bar['id'])
+
+ def test_dumb_member(self):
+ self.config_fixture.config(group='ldap', use_dumb_member=True)
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ dumb_id)
+
+ def test_project_attribute_mapping(self):
+ self.config_fixture.config(
+ group='ldap', project_name_attribute='ou',
+ project_desc_attribute='description',
+ project_enabled_attribute='enabled')
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ # NOTE(morganfainberg): CONF.ldap.project_name_attribute,
+ # CONF.ldap.project_desc_attribute, and
+ # CONF.ldap.project_enabled_attribute will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+        # could affect what the drivers would return up to the manager. This
+        # keeps that assumption valid when working with aggressive
+        # (on-create) cache population.
+ self.resource_api.get_project.invalidate(self.resource_api,
+ self.tenant_baz['id'])
+ tenant_ref = self.resource_api.get_project(self.tenant_baz['id'])
+ self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
+ self.assertEqual(self.tenant_baz['name'], tenant_ref['name'])
+ self.assertEqual(
+ self.tenant_baz['description'],
+ tenant_ref['description'])
+ self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
+
+ self.config_fixture.config(group='ldap',
+ project_name_attribute='description',
+ project_desc_attribute='ou')
+ self.load_backends()
+ # NOTE(morganfainberg): CONF.ldap.project_name_attribute,
+ # CONF.ldap.project_desc_attribute, and
+ # CONF.ldap.project_enabled_attribute will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+ # the expectation that it is safe to change config values in tests that
+        # could affect what the drivers would return up to the manager. This
+        # keeps that assumption valid when working with aggressive
+        # (on-create) cache population.
+ self.resource_api.get_project.invalidate(self.resource_api,
+ self.tenant_baz['id'])
+ tenant_ref = self.resource_api.get_project(self.tenant_baz['id'])
+ self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
+ self.assertEqual(self.tenant_baz['description'], tenant_ref['name'])
+ self.assertEqual(self.tenant_baz['name'], tenant_ref['description'])
+ self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
+
+ def test_project_attribute_ignore(self):
+ self.config_fixture.config(
+ group='ldap',
+ project_attribute_ignore=['name', 'description', 'enabled'])
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ # NOTE(morganfainberg): CONF.ldap.project_attribute_ignore will not be
+ # dynamically changed at runtime. This invalidate is a work-around for
+        # the expectation that it is safe to change config values in tests
+ # that could affect what the drivers would return up to the manager.
+        # This preserves that assumption when working with aggressive (on-create)
+ # cache population.
+ self.resource_api.get_project.invalidate(self.resource_api,
+ self.tenant_baz['id'])
+ tenant_ref = self.resource_api.get_project(self.tenant_baz['id'])
+ self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
+ self.assertNotIn('name', tenant_ref)
+ self.assertNotIn('description', tenant_ref)
+ self.assertNotIn('enabled', tenant_ref)
+
+ def test_user_enable_attribute_mask(self):
+ self.config_fixture.config(group='ldap', user_enabled_mask=2,
+ user_enabled_default='512')
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ user = {'name': u'fäké1', 'enabled': True,
+ 'domain_id': CONF.identity.default_domain_id}
+
+ user_ref = self.identity_api.create_user(user)
+
+ # Use assertIs rather than assertTrue because assertIs will assert the
+ # value is a Boolean as expected.
+ self.assertIs(user_ref['enabled'], True)
+ self.assertNotIn('enabled_nomask', user_ref)
+
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([512], enabled_vals)
+
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(user_ref['enabled'], True)
+ self.assertNotIn('enabled_nomask', user_ref)
+
+ user['enabled'] = False
+ user_ref = self.identity_api.update_user(user_ref['id'], user)
+ self.assertIs(user_ref['enabled'], False)
+ self.assertNotIn('enabled_nomask', user_ref)
+
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([514], enabled_vals)
+
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(user_ref['enabled'], False)
+ self.assertNotIn('enabled_nomask', user_ref)
+
+ user['enabled'] = True
+ user_ref = self.identity_api.update_user(user_ref['id'], user)
+ self.assertIs(user_ref['enabled'], True)
+ self.assertNotIn('enabled_nomask', user_ref)
+
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([512], enabled_vals)
+
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(user_ref['enabled'], True)
+ self.assertNotIn('enabled_nomask', user_ref)
+
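+    # Illustrative sketch (not part of the original suite): the bit
+    # arithmetic behind the 512/514 values asserted above, assuming
+    # user_enabled_mask=2 and user_enabled_default=512 as configured in
+    # test_user_enable_attribute_mask.
+    def _demo_enabled_mask_arithmetic(self):
+        mask = 2
+        default = 512
+        # Disabling a user sets the masked bit on the stored LDAP value.
+        self.assertEqual(514, default | mask)
+        # Re-enabling clears the masked bit, restoring the default.
+        self.assertEqual(512, (default | mask) & ~mask)
+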
+ def test_user_enabled_invert(self):
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_default=False)
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ user1 = {'name': u'fäké1', 'enabled': True,
+ 'domain_id': CONF.identity.default_domain_id}
+
+ user2 = {'name': u'fäké2', 'enabled': False,
+ 'domain_id': CONF.identity.default_domain_id}
+
+ user3 = {'name': u'fäké3',
+ 'domain_id': CONF.identity.default_domain_id}
+
+ # Ensure that the LDAP attribute is False for a newly created
+ # enabled user.
+ user_ref = self.identity_api.create_user(user1)
+ self.assertIs(True, user_ref['enabled'])
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([False], enabled_vals)
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(True, user_ref['enabled'])
+
+ # Ensure that the LDAP attribute is True for a disabled user.
+ user1['enabled'] = False
+ user_ref = self.identity_api.update_user(user_ref['id'], user1)
+ self.assertIs(False, user_ref['enabled'])
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([True], enabled_vals)
+
+ # Enable the user and ensure that the LDAP attribute is True again.
+ user1['enabled'] = True
+ user_ref = self.identity_api.update_user(user_ref['id'], user1)
+ self.assertIs(True, user_ref['enabled'])
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([False], enabled_vals)
+
+ # Ensure that the LDAP attribute is True for a newly created
+ # disabled user.
+ user_ref = self.identity_api.create_user(user2)
+ self.assertIs(False, user_ref['enabled'])
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([True], enabled_vals)
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(False, user_ref['enabled'])
+
+ # Ensure that the LDAP attribute is inverted for a newly created
+ # user when the user_enabled_default setting is used.
+ user_ref = self.identity_api.create_user(user3)
+ self.assertIs(True, user_ref['enabled'])
+ enabled_vals = self.get_user_enabled_vals(user_ref)
+ self.assertEqual([False], enabled_vals)
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(True, user_ref['enabled'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_enabled_invert_no_enabled_value(self, mock_ldap_get):
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_default=False)
+ # Mock the search results to return an entry with
+ # no enabled value.
+ mock_ldap_get.return_value = (
+ 'cn=junk,dc=example,dc=com',
+ {
+ 'sn': [uuid.uuid4().hex],
+ 'email': [uuid.uuid4().hex],
+ 'cn': ['junk']
+ }
+ )
+
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_ref = user_api.get('junk')
+ # Ensure that the model enabled attribute is inverted
+ # from the resource default.
+ self.assertIs(not CONF.ldap.user_enabled_default, user_ref['enabled'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_enabled_invert_default_str_value(self, mock_ldap_get):
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_default='False')
+ # Mock the search results to return an entry with
+ # no enabled value.
+ mock_ldap_get.return_value = (
+ 'cn=junk,dc=example,dc=com',
+ {
+ 'sn': [uuid.uuid4().hex],
+ 'email': [uuid.uuid4().hex],
+ 'cn': ['junk']
+ }
+ )
+
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_ref = user_api.get('junk')
+ # Ensure that the model enabled attribute is inverted
+ # from the resource default.
+ self.assertIs(True, user_ref['enabled'])
+
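+    # Sketch of the pitfall the string default above exercises (a reading
+    # of intent, not the driver's actual code): a naive bool() coercion
+    # would mis-read the string, so it must be parsed before inverting:
+    #
+    #     bool('False')                     # True  (naive, wrong)
+    #     'False'.lower() in ('true', '1')  # False (explicit parse)
+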
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_enabled_attribute_handles_expired(self, mock_ldap_get):
+ # If using 'passwordisexpired' as enabled attribute, and inverting it,
+        # then an unauthorized user (expired password) should not be enabled.
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_attribute='passwordisexpired')
+ mock_ldap_get.return_value = (
+ u'uid=123456789,c=us,ou=our_ldap,o=acme.com',
+ {
+ 'uid': [123456789],
+ 'mail': ['shaun@acme.com'],
+ 'passwordisexpired': ['TRUE'],
+ 'cn': ['uid=123456789,c=us,ou=our_ldap,o=acme.com']
+ }
+ )
+
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_ref = user_api.get('123456789')
+ self.assertIs(False, user_ref['enabled'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get):
+ # If using 'passwordisexpired' as enabled attribute, and inverting it,
+        # and the result is utf8 encoded, then an authorized user should
+        # be enabled.
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_attribute='passwordisexpired')
+ mock_ldap_get.return_value = (
+ u'uid=123456789,c=us,ou=our_ldap,o=acme.com',
+ {
+ 'uid': [123456789],
+ 'mail': [u'shaun@acme.com'],
+ 'passwordisexpired': [u'false'],
+ 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com']
+ }
+ )
+
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_ref = user_api.get('123456789')
+ self.assertIs(True, user_ref['enabled'])
+
+ @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
+ def test_user_api_get_connection_no_user_password(self, mocked_method):
+ """Don't bind in case the user and password are blank."""
+        # Ensure the username/password are in fact blank.
+ self.config_fixture.config(group='ldap', user=None, password=None)
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_api.get_connection(user=None, password=None)
+ self.assertFalse(mocked_method.called,
+ msg='`simple_bind_s` method was unexpectedly called')
+
+ @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
+ def test_chase_referrals_off(self, mocked_fakeldap):
+ self.config_fixture.config(
+ group='ldap',
+ url='fake://memory',
+ chase_referrals=False)
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_api.get_connection(user=None, password=None)
+
+ # The last call_arg should be a dictionary and should contain
+ # chase_referrals. Check to make sure the value of chase_referrals
+ # is as expected.
+ self.assertFalse(mocked_fakeldap.call_args[-1]['chase_referrals'])
+
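+    # Note on the call_args[-1] pattern used here and in the next two tests:
+    # mock.call_args is an (args, kwargs) pair, so its last element is the
+    # keyword-argument dict passed to the mocked connect(). For example:
+    #
+    #     m = mock.Mock()
+    #     m('fake://memory', chase_referrals=False)
+    #     m.call_args[-1]  # {'chase_referrals': False}
+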
+ @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
+ def test_chase_referrals_on(self, mocked_fakeldap):
+ self.config_fixture.config(
+ group='ldap',
+ url='fake://memory',
+ chase_referrals=True)
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_api.get_connection(user=None, password=None)
+
+ # The last call_arg should be a dictionary and should contain
+ # chase_referrals. Check to make sure the value of chase_referrals
+ # is as expected.
+ self.assertTrue(mocked_fakeldap.call_args[-1]['chase_referrals'])
+
+ @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
+ def test_debug_level_set(self, mocked_fakeldap):
+ level = 12345
+ self.config_fixture.config(
+ group='ldap',
+ url='fake://memory',
+ debug_level=level)
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_api.get_connection(user=None, password=None)
+
+ # The last call_arg should be a dictionary and should contain
+ # debug_level. Check to make sure the value of debug_level
+ # is as expected.
+ self.assertEqual(level, mocked_fakeldap.call_args[-1]['debug_level'])
+
+ def test_wrong_ldap_scope(self):
+ self.config_fixture.config(group='ldap', query_scope=uuid.uuid4().hex)
+ self.assertRaisesRegexp(
+ ValueError,
+ 'Invalid LDAP scope: %s. *' % CONF.ldap.query_scope,
+ identity.backends.ldap.Identity)
+
+ def test_wrong_alias_dereferencing(self):
+ self.config_fixture.config(group='ldap',
+ alias_dereferencing=uuid.uuid4().hex)
+ self.assertRaisesRegexp(
+ ValueError,
+ 'Invalid LDAP deref option: %s\.' % CONF.ldap.alias_dereferencing,
+ identity.backends.ldap.Identity)
+
+ def test_is_dumb_member(self):
+ self.config_fixture.config(group='ldap',
+ use_dumb_member=True)
+ self.load_backends()
+
+ dn = 'cn=dumb,dc=nonexistent'
+ self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn))
+
+ def test_is_dumb_member_upper_case_keys(self):
+ self.config_fixture.config(group='ldap',
+ use_dumb_member=True)
+ self.load_backends()
+
+ dn = 'CN=dumb,DC=nonexistent'
+ self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn))
+
+ def test_is_dumb_member_with_false_use_dumb_member(self):
+ self.config_fixture.config(group='ldap',
+ use_dumb_member=False)
+ self.load_backends()
+ dn = 'cn=dumb,dc=nonexistent'
+ self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn))
+
+ def test_is_dumb_member_not_dumb(self):
+ self.config_fixture.config(group='ldap',
+ use_dumb_member=True)
+ self.load_backends()
+ dn = 'ou=some,dc=example.com'
+ self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn))
+
+ def test_user_extra_attribute_mapping(self):
+ self.config_fixture.config(
+ group='ldap',
+ user_additional_attribute_mapping=['description:name'])
+ self.load_backends()
+ user = {
+ 'name': 'EXTRA_ATTRIBUTES',
+ 'password': 'extra',
+ 'domain_id': CONF.identity.default_domain_id
+ }
+ user = self.identity_api.create_user(user)
+ dn, attrs = self.identity_api.driver.user._ldap_get(user['id'])
+ self.assertThat([user['name']], matchers.Equals(attrs['description']))
+
+ def test_user_extra_attribute_mapping_description_is_returned(self):
+ # Given a mapping like description:description, the description is
+ # returned.
+
+ self.config_fixture.config(
+ group='ldap',
+ user_additional_attribute_mapping=['description:description'])
+ self.load_backends()
+
+ description = uuid.uuid4().hex
+ user = {
+ 'name': uuid.uuid4().hex,
+ 'description': description,
+ 'password': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id
+ }
+ user = self.identity_api.create_user(user)
+ res = self.identity_api.driver.user.get_all()
+
+ new_user = [u for u in res if u['id'] == user['id']][0]
+ self.assertThat(new_user['description'], matchers.Equals(description))
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_mixed_case_attribute(self, mock_ldap_get):
+ # Mock the search results to return attribute names
+ # with unexpected case.
+ mock_ldap_get.return_value = (
+ 'cn=junk,dc=example,dc=com',
+ {
+ 'sN': [uuid.uuid4().hex],
+ 'MaIl': [uuid.uuid4().hex],
+ 'cn': ['junk']
+ }
+ )
+ user = self.identity_api.get_user('junk')
+ self.assertEqual(mock_ldap_get.return_value[1]['sN'][0],
+ user['name'])
+ self.assertEqual(mock_ldap_get.return_value[1]['MaIl'][0],
+ user['email'])
+
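+    # The lookup above works because LDAP attribute names are
+    # case-insensitive: the driver is expected to fold 'sN' and 'MaIl' back
+    # to its configured 'sn'/'mail' mapping, e.g. (sketch):
+    #
+    #     'MaIl'.lower() == 'mail'  # True
+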
+ def test_parse_extra_attribute_mapping(self):
+ option_list = ['description:name', 'gecos:password',
+ 'fake:invalid', 'invalid1', 'invalid2:',
+ 'description:name:something']
+ mapping = self.identity_api.driver.user._parse_extra_attrs(option_list)
+ expected_dict = {'description': 'name', 'gecos': 'password',
+ 'fake': 'invalid', 'invalid2': ''}
+ self.assertDictEqual(expected_dict, mapping)
+
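+    # A behavioural sketch (hypothetical helper, not the driver's code) of
+    # the rule the expected_dict above implies: each option must be exactly
+    # one 'ldap_attr:keystone_attr' pair; entries with no colon, or with
+    # more than one, are dropped.
+    @staticmethod
+    def _demo_parse_extra_attrs(option_list):
+        mapping = {}
+        for item in option_list:
+            parts = item.split(':')
+            if len(parts) != 2:
+                # 'invalid1' and 'description:name:something' fall here.
+                continue
+            mapping[parts[0]] = parts[1]
+        return mapping
+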
+# TODO(henry-nash): These need to be removed when the full LDAP implementation
+# is submitted - see Bugs 1092187, 1101287, 1101276, 1101289
+
+ def test_domain_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True, 'description': uuid.uuid4().hex}
+ self.assertRaises(exception.Forbidden,
+ self.resource_api.create_domain,
+ domain['id'],
+ domain)
+ self.assertRaises(exception.Conflict,
+ self.resource_api.create_domain,
+ CONF.identity.default_domain_id,
+ domain)
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ domain['description'] = uuid.uuid4().hex
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.update_domain,
+ domain['id'],
+ domain)
+ self.assertRaises(exception.Forbidden,
+ self.resource_api.update_domain,
+ CONF.identity.default_domain_id,
+ domain)
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.delete_domain,
+ domain['id'])
+ self.assertRaises(exception.Forbidden,
+ self.resource_api.delete_domain,
+ CONF.identity.default_domain_id)
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ @tests.skip_if_no_multiple_domains_support
+ def test_create_domain_case_sensitivity(self):
+ # domains are read-only, so case sensitivity isn't an issue
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.assertRaises(exception.Forbidden,
+ self.resource_api.create_domain,
+ ref['id'],
+ ref)
+
+ def test_cache_layer_domain_crud(self):
+ # TODO(morganfainberg): This also needs to be removed when full LDAP
+ # implementation is submitted. No need to duplicate the above test,
+ # just skip this time.
+ self.skipTest('Domains are read-only against LDAP')
+
+ def test_domain_rename_invalidates_get_domain_by_name_cache(self):
+ parent = super(LDAPIdentity, self)
+ self.assertRaises(
+ exception.Forbidden,
+ parent.test_domain_rename_invalidates_get_domain_by_name_cache)
+
+ def test_project_rename_invalidates_get_project_by_name_cache(self):
+ parent = super(LDAPIdentity, self)
+ self.assertRaises(
+ exception.Forbidden,
+ parent.test_project_rename_invalidates_get_project_by_name_cache)
+
+ def test_project_crud(self):
+ # NOTE(topol): LDAP implementation does not currently support the
+        # updating of a project name, so this method override
+        # provides a different update test.
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'parent_id': None}
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+
+ self.assertDictEqual(project_ref, project)
+
+ project['description'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ self.resource_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ @tests.skip_if_cache_disabled('assignment')
+ def test_cache_layer_project_crud(self):
+ # NOTE(morganfainberg): LDAP implementation does not currently support
+ # updating project names. This method override provides a different
+ # update test.
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex}
+ project_id = project['id']
+ # Create a project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project(project_id)
+ updated_project = copy.deepcopy(project)
+ updated_project['description'] = uuid.uuid4().hex
+ # Update project, bypassing resource manager
+ self.resource_api.driver.update_project(project_id,
+ updated_project)
+ # Verify get_project still returns the original project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Invalidate cache
+ self.resource_api.get_project.invalidate(self.resource_api,
+ project_id)
+ # Verify get_project now returns the new project
+ self.assertDictContainsSubset(
+ updated_project,
+ self.resource_api.get_project(project_id))
+ # Update project using the resource_api manager back to original
+ self.resource_api.update_project(project['id'], project)
+ # Verify get_project returns the original project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Delete project bypassing resource_api
+ self.resource_api.driver.delete_project(project_id)
+ # Verify get_project still returns the project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Invalidate cache
+ self.resource_api.get_project.invalidate(self.resource_api,
+ project_id)
+ # Verify ProjectNotFound now raised
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project_id)
+ # recreate project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project(project_id)
+ # delete project
+ self.resource_api.delete_project(project_id)
+ # Verify ProjectNotFound is raised
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project_id)
+
+ def _assert_create_hierarchy_not_allowed(self):
+ domain = self._get_domain_fixture()
+
+ project1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': domain['id'],
+ 'enabled': True,
+ 'parent_id': None}
+ self.resource_api.create_project(project1['id'], project1)
+
+        # Try to create project2 under project1. LDAP will not allow
+        # the creation of a project with parent_id set.
+ project2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': domain['id'],
+ 'enabled': True,
+ 'parent_id': project1['id']}
+
+ self.assertRaises(exception.InvalidParentProject,
+ self.resource_api.create_project,
+ project2['id'],
+ project2)
+
+ # Now, we'll create project 2 with no parent
+ project2['parent_id'] = None
+ self.resource_api.create_project(project2['id'], project2)
+
+ # Returning projects to be used across the tests
+ return [project1, project2]
+
+ def test_check_leaf_projects(self):
+ projects = self._assert_create_hierarchy_not_allowed()
+ for project in projects:
+ self.assertTrue(self.resource_api.is_leaf_project(project))
+
+ def test_list_projects_in_subtree(self):
+ projects = self._assert_create_hierarchy_not_allowed()
+ for project in projects:
+ subtree_list = self.resource_api.list_projects_in_subtree(
+ project)
+ self.assertEqual(0, len(subtree_list))
+
+ def test_list_project_parents(self):
+ projects = self._assert_create_hierarchy_not_allowed()
+ for project in projects:
+ parents_list = self.resource_api.list_project_parents(project)
+ self.assertEqual(0, len(parents_list))
+
+ def test_hierarchical_projects_crud(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_create_project_under_disabled_one(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_create_project_with_invalid_parent(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_create_leaf_project_with_invalid_domain(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_update_project_parent(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_enable_project_with_disabled_parent(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_disable_hierarchical_leaf_project(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_disable_hierarchical_not_leaf_project(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_delete_hierarchical_leaf_project(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_delete_hierarchical_not_leaf_project(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_check_hierarchy_depth(self):
+ projects = self._assert_create_hierarchy_not_allowed()
+ for project in projects:
+ depth = self._get_hierarchy_depth(project['id'])
+ self.assertEqual(1, depth)
+
+ def test_multi_role_grant_by_user_group_on_project_domain(self):
+ # This is a partial implementation of the standard test that
+ # is defined in test_backend.py. It omits both domain and
+        # group grants, since neither of these is yet supported by
+        # the LDAP backend.
+
+ role_list = []
+ for _ in range(2):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ user1 = {'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id}
+ self.resource_api.create_project(project1['id'], project1)
+
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user1['id'],
+ tenant_id=project1['id'],
+ role_id=role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user1['id'],
+ tenant_id=project1['id'],
+ role_id=role_list[1]['id'])
+
+        # Although list_grants is not yet supported, we can test the
+        # alternate way of getting back lists of grants, where user
+        # and group roles are combined. Only directly assigned user
+        # roles are available, since group grants are not yet supported.
+
+ combined_list = self.assignment_api.get_roles_for_user_and_project(
+ user1['id'],
+ project1['id'])
+ self.assertEqual(2, len(combined_list))
+ self.assertIn(role_list[0]['id'], combined_list)
+ self.assertIn(role_list[1]['id'], combined_list)
+
+ # Finally, although domain roles are not implemented, check we can
+        # issue the combined get roles call with benign results, since this
+        # is used in token generation.
+
+ combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
+ user1['id'], CONF.identity.default_domain_id)
+ self.assertEqual(0, len(combined_role_list))
+
+ def test_list_projects_for_alternate_domain(self):
+ self.skipTest(
+ 'N/A: LDAP does not support multiple domains')
+
+ def test_get_default_domain_by_name(self):
+ domain = self._get_domain_fixture()
+
+ domain_ref = self.resource_api.get_domain_by_name(domain['name'])
+ self.assertEqual(domain_ref, domain)
+
+ def test_base_ldap_connection_deref_option(self):
+ def get_conn(deref_name):
+ self.config_fixture.config(group='ldap',
+ alias_dereferencing=deref_name)
+ base_ldap = common_ldap.BaseLdap(CONF)
+ return base_ldap.get_connection()
+
+ conn = get_conn('default')
+ self.assertEqual(ldap.get_option(ldap.OPT_DEREF),
+ conn.get_option(ldap.OPT_DEREF))
+
+ conn = get_conn('always')
+ self.assertEqual(ldap.DEREF_ALWAYS,
+ conn.get_option(ldap.OPT_DEREF))
+
+ conn = get_conn('finding')
+ self.assertEqual(ldap.DEREF_FINDING,
+ conn.get_option(ldap.OPT_DEREF))
+
+ conn = get_conn('never')
+ self.assertEqual(ldap.DEREF_NEVER,
+ conn.get_option(ldap.OPT_DEREF))
+
+ conn = get_conn('searching')
+ self.assertEqual(ldap.DEREF_SEARCHING,
+ conn.get_option(ldap.OPT_DEREF))
+
+ def test_list_users_no_dn(self):
+ users = self.identity_api.list_users()
+ self.assertEqual(len(default_fixtures.USERS), len(users))
+ user_ids = set(user['id'] for user in users)
+ expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
+ for user in default_fixtures.USERS)
+ for user_ref in users:
+ self.assertNotIn('dn', user_ref)
+ self.assertEqual(expected_user_ids, user_ids)
+
+ def test_list_groups_no_dn(self):
+ # Create some test groups.
+ domain = self._get_domain_fixture()
+ expected_group_ids = []
+ numgroups = 3
+ for _ in range(numgroups):
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group = self.identity_api.create_group(group)
+ expected_group_ids.append(group['id'])
+ # Fetch the test groups and ensure that they don't contain a dn.
+ groups = self.identity_api.list_groups()
+ self.assertEqual(numgroups, len(groups))
+ group_ids = set(group['id'] for group in groups)
+ for group_ref in groups:
+ self.assertNotIn('dn', group_ref)
+ self.assertEqual(set(expected_group_ids), group_ids)
+
+ def test_list_groups_for_user_no_dn(self):
+ # Create a test user.
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user = self.identity_api.create_user(user)
+ # Create some test groups and add the test user as a member.
+ domain = self._get_domain_fixture()
+ expected_group_ids = []
+ numgroups = 3
+ for _ in range(numgroups):
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group = self.identity_api.create_group(group)
+ expected_group_ids.append(group['id'])
+ self.identity_api.add_user_to_group(user['id'], group['id'])
+ # Fetch the groups for the test user
+ # and ensure they don't contain a dn.
+ groups = self.identity_api.list_groups_for_user(user['id'])
+ self.assertEqual(numgroups, len(groups))
+ group_ids = set(group['id'] for group in groups)
+ for group_ref in groups:
+ self.assertNotIn('dn', group_ref)
+ self.assertEqual(set(expected_group_ids), group_ids)
+
+ def test_user_id_attribute_in_create(self):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_id_attribute = 'mail'
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ user = {'name': u'fäké1',
+ 'password': u'fäképass1',
+ 'domain_id': CONF.identity.default_domain_id}
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+        # The 'email' attribute should have been created because it is also
+        # being used as the user_id.
+ self.assertEqual(user_ref['id'], user_ref['email'])
+
+ def test_user_id_attribute_map(self):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_id_attribute = 'mail'
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ user_ref = self.identity_api.get_user(self.user_foo['email'])
+ # the user_id_attribute map should be honored, which means
+        # user_ref['id'] should contain the email attribute value
+ self.assertEqual(self.user_foo['email'], user_ref['id'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_get_id_from_dn_for_multivalued_attribute_id(self, mock_ldap_get):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_id_attribute = 'mail'
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ # make 'email' multivalued so we can test the error condition
+ email1 = uuid.uuid4().hex
+ email2 = uuid.uuid4().hex
+ mock_ldap_get.return_value = (
+ 'cn=nobodycares,dc=example,dc=com',
+ {
+ 'sn': [uuid.uuid4().hex],
+ 'mail': [email1, email2],
+ 'cn': 'nobodycares'
+ }
+ )
+
+ user_ref = self.identity_api.get_user(email1)
+ # make sure we get the ID from DN (old behavior) if the ID attribute
+ # has multiple values
+ self.assertEqual('nobodycares', user_ref['id'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_id_attribute_not_found(self, mock_ldap_get):
+ mock_ldap_get.return_value = (
+ 'cn=nobodycares,dc=example,dc=com',
+ {
+ 'sn': [uuid.uuid4().hex],
+ }
+ )
+
+ user_api = identity.backends.ldap.UserApi(CONF)
+ self.assertRaises(exception.NotFound,
+ user_api.get,
+ 'nobodycares')
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_id_not_in_dn(self, mock_ldap_get):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_id_attribute = 'uid'
+ conf.ldap.user_name_attribute = 'cn'
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ mock_ldap_get.return_value = (
+ 'foo=bar,dc=example,dc=com',
+ {
+ 'sn': [uuid.uuid4().hex],
+ 'foo': ['bar'],
+ 'cn': ['junk'],
+ 'uid': ['crap']
+ }
+ )
+ user_ref = self.identity_api.get_user('crap')
+ self.assertEqual('crap', user_ref['id'])
+ self.assertEqual('junk', user_ref['name'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_name_in_dn(self, mock_ldap_get):
+ conf = self.get_config(CONF.identity.default_domain_id)
+ conf.ldap.user_id_attribute = 'sAMAccountName'
+ conf.ldap.user_name_attribute = 'cn'
+ self.reload_backends(CONF.identity.default_domain_id)
+
+ mock_ldap_get.return_value = (
+ 'cn=Foo Bar,dc=example,dc=com',
+ {
+ 'sn': [uuid.uuid4().hex],
+ 'cn': ['Foo Bar'],
+ 'SAMAccountName': ['crap']
+ }
+ )
+ user_ref = self.identity_api.get_user('crap')
+ self.assertEqual('crap', user_ref['id'])
+ self.assertEqual('Foo Bar', user_ref['name'])
+
+
+class LDAPIdentityEnabledEmulation(LDAPIdentity):
+ def setUp(self):
+ super(LDAPIdentityEnabledEmulation, self).setUp()
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ for obj in [self.tenant_bar, self.tenant_baz, self.user_foo,
+ self.user_two, self.user_badguy]:
+ obj.setdefault('enabled', True)
+
+ def load_fixtures(self, fixtures):
+        # Override the super implementation since we need to create the
+        # group container.
+ create_group_container(self.identity_api)
+ super(LDAPIdentity, self).load_fixtures(fixtures)
+
+ def config_files(self):
+ config_files = super(LDAPIdentityEnabledEmulation, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ def config_overrides(self):
+ super(LDAPIdentityEnabledEmulation, self).config_overrides()
+ self.config_fixture.config(group='ldap',
+ user_enabled_emulation=True,
+ project_enabled_emulation=True)
+
+ def test_project_crud(self):
+ # NOTE(topol): LDAPIdentityEnabledEmulation will create an
+        # enabled key in the project dictionary, so this
+        # method override handles that side effect.
+ project = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex,
+ 'parent_id': None}
+
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+
+ # self.resource_api.create_project adds an enabled
+ # key with a value of True when LDAPIdentityEnabledEmulation
+        # is used, so we now add this expected key to the project dictionary.
+ project['enabled'] = True
+ self.assertDictEqual(project_ref, project)
+
+ project['description'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ self.resource_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ def test_user_crud(self):
+ user_dict = {
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex}
+ user = self.identity_api.create_user(user_dict)
+ user_dict['enabled'] = True
+ user_ref = self.identity_api.get_user(user['id'])
+ del user_dict['password']
+ user_ref_dict = {x: user_ref[x] for x in user_ref}
+ self.assertDictContainsSubset(user_dict, user_ref_dict)
+
+ user_dict['password'] = uuid.uuid4().hex
+        self.identity_api.update_user(user['id'], user_dict)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user_dict['password']
+ user_ref_dict = {x: user_ref[x] for x in user_ref}
+ self.assertDictContainsSubset(user_dict, user_ref_dict)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+
+ def test_user_auth_emulated(self):
+ self.config_fixture.config(group='ldap',
+ user_enabled_emulation_dn='cn=test,dc=test')
+ self.reload_backends(CONF.identity.default_domain_id)
+ self.identity_api.authenticate(
+ context={},
+ user_id=self.user_foo['id'],
+ password=self.user_foo['password'])
+
+ def test_user_enable_attribute_mask(self):
+ self.skipTest(
+ "Enabled emulation conflicts with enabled mask")
+
+ def test_user_enabled_invert(self):
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_default=False)
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ user1 = {'name': u'fäké1', 'enabled': True,
+ 'domain_id': CONF.identity.default_domain_id}
+
+ user2 = {'name': u'fäké2', 'enabled': False,
+ 'domain_id': CONF.identity.default_domain_id}
+
+ user3 = {'name': u'fäké3',
+ 'domain_id': CONF.identity.default_domain_id}
+
+ # Ensure that the enabled LDAP attribute is not set for a
+ # newly created enabled user.
+ user_ref = self.identity_api.create_user(user1)
+ self.assertIs(True, user_ref['enabled'])
+ self.assertIsNone(self.get_user_enabled_vals(user_ref))
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(True, user_ref['enabled'])
+
+ # Ensure that an enabled LDAP attribute is not set for a disabled user.
+ user1['enabled'] = False
+ user_ref = self.identity_api.update_user(user_ref['id'], user1)
+ self.assertIs(False, user_ref['enabled'])
+ self.assertIsNone(self.get_user_enabled_vals(user_ref))
+
+ # Enable the user and ensure that the LDAP enabled
+ # attribute is not set.
+ user1['enabled'] = True
+ user_ref = self.identity_api.update_user(user_ref['id'], user1)
+ self.assertIs(True, user_ref['enabled'])
+ self.assertIsNone(self.get_user_enabled_vals(user_ref))
+
+ # Ensure that the LDAP enabled attribute is not set for a
+ # newly created disabled user.
+ user_ref = self.identity_api.create_user(user2)
+ self.assertIs(False, user_ref['enabled'])
+ self.assertIsNone(self.get_user_enabled_vals(user_ref))
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(False, user_ref['enabled'])
+
+ # Ensure that the LDAP enabled attribute is not set for a newly created
+ # user when the user_enabled_default setting is used.
+ user_ref = self.identity_api.create_user(user3)
+ self.assertIs(True, user_ref['enabled'])
+ self.assertIsNone(self.get_user_enabled_vals(user_ref))
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertIs(True, user_ref['enabled'])
+
+ def test_user_enabled_invert_no_enabled_value(self):
+ self.skipTest(
+ "N/A: Covered by test_user_enabled_invert")
+
+ def test_user_enabled_invert_default_str_value(self):
+ self.skipTest(
+ "N/A: Covered by test_user_enabled_invert")
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
+ def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get):
+        # Since user_enabled_emulation is enabled in this test, the
+        # user_enabled_invert setting is ignored and the user ends up
+        # disabled, so the assertion below differs from the parent class.
+ self.config_fixture.config(group='ldap', user_enabled_invert=True,
+ user_enabled_attribute='passwordisexpired')
+ mock_ldap_get.return_value = (
+ u'uid=123456789,c=us,ou=our_ldap,o=acme.com',
+ {
+ 'uid': [123456789],
+ 'mail': [u'shaun@acme.com'],
+ 'passwordisexpired': [u'false'],
+ 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com']
+ }
+ )
+
+ user_api = identity.backends.ldap.UserApi(CONF)
+ user_ref = user_api.get('123456789')
+ self.assertIs(False, user_ref['enabled'])
+
+
+class LdapIdentitySqlAssignment(BaseLDAPIdentity, tests.SQLDriverOverrides,
+ tests.TestCase):
+
+ def config_files(self):
+ config_files = super(LdapIdentitySqlAssignment, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap_sql.conf'))
+ return config_files
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(LdapIdentitySqlAssignment, self).setUp()
+ self.clear_database()
+ self.load_backends()
+ cache.configure_cache_region(cache.REGION)
+ self.engine = sql.get_engine()
+ self.addCleanup(sql.cleanup)
+
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+
+ self.load_fixtures(default_fixtures)
+ # defaulted by the data load
+ self.user_foo['enabled'] = True
+
+ def config_overrides(self):
+ super(LdapIdentitySqlAssignment, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(
+ group='resource',
+ driver='keystone.resource.backends.sql.Resource')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+
+ def test_domain_crud(self):
+ pass
+
+ def test_list_domains(self):
+ domains = self.resource_api.list_domains()
+ self.assertEqual([resource.calc_default_domain()], domains)
+
+ def test_list_domains_non_default_domain_id(self):
+        # If the default_domain_id is changed, the ID of the default domain
+        # returned by list_domains doesn't change, because the SQL backend
+        # reads it from the database, which doesn't get updated by a config
+        # change.
+
+ orig_default_domain_id = CONF.identity.default_domain_id
+
+ new_domain_id = uuid.uuid4().hex
+ self.config_fixture.config(group='identity',
+ default_domain_id=new_domain_id)
+
+ domains = self.resource_api.list_domains()
+
+ self.assertEqual(orig_default_domain_id, domains[0]['id'])
+
+ def test_create_domain(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.assertRaises(exception.Forbidden,
+ self.resource_api.create_domain,
+ domain['id'],
+ domain)
+
+ def test_get_and_remove_role_grant_by_group_and_domain(self):
+ # TODO(henry-nash): We should really rewrite the tests in test_backend
+ # to be more flexible as to where the domains are sourced from, so
+ # that we would not need to override such tests here. This is raised
+ # as bug 1373865.
+ new_domain = self._get_domain_fixture()
+ new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.assignment_api.delete_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.NotFound,
+ self.assignment_api.delete_grant,
+ group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_project_enabled_ignored_disable_error(self):
+ # Override
+ self.skipTest("Doesn't apply since LDAP configuration is ignored for "
+ "SQL assignment backend.")
+
+
+class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
+ """Class to test mapping of default LDAP backend.
+
+ The default configuration is not to enable mapping when using a single
+ backend LDAP driver. However, a cloud provider might want to enable
+ the mapping, hence hiding the LDAP IDs from any clients of keystone.
+ Setting backward_compatible_ids to False will enable this mapping.
+
+ """
+ def config_overrides(self):
+ super(LdapIdentitySqlAssignmentWithMapping, self).config_overrides()
+ self.config_fixture.config(group='identity_mapping',
+ backward_compatible_ids=False)
+
+ def test_dynamic_mapping_build(self):
+        """Test to ensure entities not created via the controller are mapped.
+
+        Many LDAP backends will, essentially, be read only. In these cases
+        the mapping is not built by creating objects, but rather by
+        enumerating the entries. We test this here by manually deleting the
+        mapping and then trying to re-read the entries.
+
+ """
+ initial_mappings = len(mapping_sql.list_id_mappings())
+ user1 = {'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.identity_api.create_user(user1)
+ user2 = {'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'password': uuid.uuid4().hex, 'enabled': True}
+ user2 = self.identity_api.create_user(user2)
+ mappings = mapping_sql.list_id_mappings()
+ self.assertEqual(initial_mappings + 2, len(mappings))
+
+ # Now delete the mappings for the two users above
+ self.id_mapping_api.purge_mappings({'public_id': user1['id']})
+ self.id_mapping_api.purge_mappings({'public_id': user2['id']})
+
+ # We should no longer be able to get these users via their old IDs
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user1['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user2['id'])
+
+ # Now enumerate all users...this should re-build the mapping, and
+ # we should be able to find the users via their original public IDs.
+ self.identity_api.list_users()
+ self.identity_api.get_user(user1['id'])
+ self.identity_api.get_user(user2['id'])
+
+ def test_get_roles_for_user_and_project_user_group_same_id(self):
+ self.skipTest('N/A: We never generate the same ID for a user and '
+ 'group in our mapping table')
+
+
+class BaseMultiLDAPandSQLIdentity(object):
+ """Mixin class with support methods for domain-specific config testing."""
+
+ def create_user(self, domain_id):
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': domain_id,
+ 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ user_ref = self.identity_api.create_user(user)
+ # Put the password back in, since this is used later by tests to
+ # authenticate.
+ user_ref['password'] = user['password']
+ return user_ref
+
+ def create_users_across_domains(self):
+ """Create a set of users, each with a role on their own domain."""
+
+        # We will also check that the right number of id mappings get created
+ initial_mappings = len(mapping_sql.list_id_mappings())
+
+ self.users['user0'] = self.create_user(
+ self.domains['domain_default']['id'])
+ self.assignment_api.create_grant(
+ user_id=self.users['user0']['id'],
+ domain_id=self.domains['domain_default']['id'],
+ role_id=self.role_member['id'])
+ for x in range(1, self.domain_count):
+ self.users['user%s' % x] = self.create_user(
+ self.domains['domain%s' % x]['id'])
+ self.assignment_api.create_grant(
+ user_id=self.users['user%s' % x]['id'],
+ domain_id=self.domains['domain%s' % x]['id'],
+ role_id=self.role_member['id'])
+
+ # So how many new id mappings should have been created? One for each
+        # user created in a domain that is using the non-default driver.
+ self.assertEqual(initial_mappings + self.domain_specific_count,
+ len(mapping_sql.list_id_mappings()))
+
+ def check_user(self, user, domain_id, expected_status):
+ """Check user is in correct backend.
+
+ As part of the tests, we want to force ourselves to manually
+ select the driver for a given domain, to make sure the entity
+ ended up in the correct backend.
+
+ """
+ driver = self.identity_api._select_identity_driver(domain_id)
+ unused, unused, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(
+ user['id']))
+
+ if expected_status == 200:
+ ref = driver.get_user(entity_id)
+ ref = self.identity_api._set_domain_id_and_mapping(
+ ref, domain_id, driver, map.EntityType.USER)
+ user = user.copy()
+ del user['password']
+ self.assertDictEqual(ref, user)
+ else:
+            # TODO(henry-nash): Use assertRaises() here, although
+            # there appears to be an issue with using driver.get_user
+            # inside that construct.
+ try:
+ driver.get_user(entity_id)
+ except expected_status:
+ pass
+
+ def setup_initial_domains(self):
+
+ def create_domain(domain):
+ try:
+ ref = self.resource_api.create_domain(
+ domain['id'], domain)
+ except exception.Conflict:
+ ref = (
+ self.resource_api.get_domain_by_name(domain['name']))
+ return ref
+
+ self.domains = {}
+ for x in range(1, self.domain_count):
+ domain = 'domain%s' % x
+ self.domains[domain] = create_domain(
+ {'id': uuid.uuid4().hex, 'name': domain})
+ self.domains['domain_default'] = create_domain(
+ resource.calc_default_domain())
+
+ def test_authenticate_to_each_domain(self):
+ """Test that a user in each domain can authenticate."""
+ for user_num in range(self.domain_count):
+ user = 'user%s' % user_num
+ self.identity_api.authenticate(
+ context={},
+ user_id=self.users[user]['id'],
+ password=self.users[user]['password'])
+
+
+class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
+ tests.TestCase, BaseMultiLDAPandSQLIdentity):
+ """Class to test common SQL plus individual LDAP backends.
+
+ We define a set of domains and domain-specific backends:
+
+ - A separate LDAP backend for the default domain
+ - A separate LDAP backend for domain1
+ - domain2 shares the same LDAP as domain1, but uses a different
+ tree attach point
+ - An SQL backend for all other domains (which will include domain3
+ and domain4)
+
+ Normally one would expect that the default domain would be handled as
+ part of the "other domains" - however the above provides better
+ test coverage since most of the existing backend tests use the default
+ domain.
+
+ """
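+    # For reference, a domain-specific config file in the
+    # domain_configs_multi_ldap test directory (named
+    # keystone.<domain_name>.conf, e.g. keystone.domain1.conf) carries
+    # roughly the same values as the dicts used by
+    # MultiLDAPandSQLIdentityDomainConfigsInSQL below:
+    #
+    #     [ldap]
+    #     url = fake://memory1
+    #     [identity]
+    #     driver = keystone.identity.backends.ldap.Identity
+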
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(MultiLDAPandSQLIdentity, self).setUp()
+
+ self.load_backends()
+
+ self.engine = sql.get_engine()
+ self.addCleanup(sql.cleanup)
+
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+
+ self.domain_count = 5
+ self.domain_specific_count = 3
+ self.setup_initial_domains()
+ self._setup_initial_users()
+
+        # All initial test data setup complete; time to switch on support
+ # for separate backends per domain.
+ self.enable_multi_domain()
+
+ self.clear_database()
+ self.load_fixtures(default_fixtures)
+ self.create_users_across_domains()
+
+ def config_overrides(self):
+ super(MultiLDAPandSQLIdentity, self).config_overrides()
+        # Make sure identity and assignment are actually SQL drivers;
+ # BaseLDAPIdentity sets these options to use LDAP.
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.sql.Identity')
+ self.config_fixture.config(
+ group='resource',
+ driver='keystone.resource.backends.sql.Resource')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+
+ def _setup_initial_users(self):
+ # Create some identity entities BEFORE we switch to multi-backend, so
+ # we can test that these are still accessible
+ self.users = {}
+ self.users['userA'] = self.create_user(
+ self.domains['domain_default']['id'])
+ self.users['userB'] = self.create_user(
+ self.domains['domain1']['id'])
+ self.users['userC'] = self.create_user(
+ self.domains['domain3']['id'])
+
+ def enable_multi_domain(self):
+ """Enable the chosen form of multi domain configuration support.
+
+ This method enables the file-based configuration support. Child classes
+ that wish to use the database domain configuration support should
+ override this method and set the appropriate config_fixture option.
+
+ """
+ self.config_fixture.config(
+ group='identity', domain_specific_drivers_enabled=True,
+ domain_config_dir=tests.TESTCONF + '/domain_configs_multi_ldap')
+ self.config_fixture.config(group='identity_mapping',
+ backward_compatible_ids=False)
+
+ def reload_backends(self, domain_id):
+        # Just reload the driver for this domain - which will pick up
+        # any updated cfg.
+ self.identity_api.domain_configs.reload_domain_driver(domain_id)
+
+ def get_config(self, domain_id):
+        # Get the config for this domain; this will return CONF
+        # if no specific config is defined for this domain.
+ return self.identity_api.domain_configs.get_domain_conf(domain_id)
+
+ def test_list_domains(self):
+ self.skipTest(
+ 'N/A: Not relevant for multi ldap testing')
+
+ def test_list_domains_non_default_domain_id(self):
+ self.skipTest(
+ 'N/A: Not relevant for multi ldap testing')
+
+ def test_list_users(self):
+ # Override the standard list users, since we have added an extra user
+ # to the default domain, so the number of expected users is one more
+ # than in the standard test.
+ users = self.identity_api.list_users(
+ domain_scope=self._set_domain_scope(
+ CONF.identity.default_domain_id))
+ self.assertEqual(len(default_fixtures.USERS) + 1, len(users))
+ user_ids = set(user['id'] for user in users)
+ expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
+ for user in default_fixtures.USERS)
+ expected_user_ids.add(self.users['user0']['id'])
+ for user_ref in users:
+ self.assertNotIn('password', user_ref)
+ self.assertEqual(expected_user_ids, user_ids)
+
+ def test_domain_segregation(self):
+ """Test that separate configs have segregated the domain.
+
+ Test Plan:
+
+        - Users were created in each domain as part of setup; now make sure
+ you can only find a given user in its relevant domain/backend
+ - Make sure that for a backend that supports multiple domains
+ you can get the users via any of its domains
+
+ """
+ # Check that I can read a user with the appropriate domain-selected
+ # driver, but won't find it via any other domain driver
+
+ check_user = self.check_user
+ check_user(self.users['user0'],
+ self.domains['domain_default']['id'], 200)
+ for domain in [self.domains['domain1']['id'],
+ self.domains['domain2']['id'],
+ self.domains['domain3']['id'],
+ self.domains['domain4']['id']]:
+ check_user(self.users['user0'], domain, exception.UserNotFound)
+
+ check_user(self.users['user1'], self.domains['domain1']['id'], 200)
+ for domain in [self.domains['domain_default']['id'],
+ self.domains['domain2']['id'],
+ self.domains['domain3']['id'],
+ self.domains['domain4']['id']]:
+ check_user(self.users['user1'], domain, exception.UserNotFound)
+
+ check_user(self.users['user2'], self.domains['domain2']['id'], 200)
+ for domain in [self.domains['domain_default']['id'],
+ self.domains['domain1']['id'],
+ self.domains['domain3']['id'],
+ self.domains['domain4']['id']]:
+ check_user(self.users['user2'], domain, exception.UserNotFound)
+
+ # domain3 and domain4 share the same backend, so you should be
+ # able to see user3 and user4 from either.
+
+ check_user(self.users['user3'], self.domains['domain3']['id'], 200)
+ check_user(self.users['user3'], self.domains['domain4']['id'], 200)
+ check_user(self.users['user4'], self.domains['domain3']['id'], 200)
+ check_user(self.users['user4'], self.domains['domain4']['id'], 200)
+
+ for domain in [self.domains['domain_default']['id'],
+ self.domains['domain1']['id'],
+ self.domains['domain2']['id']]:
+ check_user(self.users['user3'], domain, exception.UserNotFound)
+ check_user(self.users['user4'], domain, exception.UserNotFound)
+
+ # Finally, going through the regular manager layer, make sure we
+ # only see the right number of users in each of the non-default
+ # domains. One might have expected two users in domain1 (since we
+ # created one before we switched to multi-backend), however since
+        # created one before we switched to multi-backend); however, since
+        # that domain changed backends in the switch, we don't find it anymore.
+ # backends.
+ #
+ # The listing of the default domain is already handled in the
+        # test_list_users() method.
+ for domain in [self.domains['domain1']['id'],
+ self.domains['domain2']['id'],
+ self.domains['domain4']['id']]:
+ self.assertThat(
+ self.identity_api.list_users(domain_scope=domain),
+ matchers.HasLength(1))
+
+ # domain3 had a user created before we switched on
+ # multiple backends, plus one created afterwards - and its
+ # backend has not changed - so we should find two.
+ self.assertThat(
+ self.identity_api.list_users(
+ domain_scope=self.domains['domain3']['id']),
+ matchers.HasLength(2))
+
+ def test_existing_uuids_work(self):
+ """Test that 'uni-domain' created IDs still work.
+
+ Throwing the switch to domain-specific backends should not cause
+ existing identities to be inaccessible via ID.
+
+ """
+ self.identity_api.get_user(self.users['userA']['id'])
+ self.identity_api.get_user(self.users['userB']['id'])
+ self.identity_api.get_user(self.users['userC']['id'])
+
+ def test_scanning_of_config_dir(self):
+ """Test the Manager class scans the config directory.
+
+ The setup for the main tests above load the domain configs directly
+        The setup for the main tests above loads the domain configs directly
+ that the standard config directory scanning does pick up the relevant
+ domain config files.
+
+ """
+        # Confirm that the config has drivers_enabled set to True; later in
+        # this test we check that it is False in a domain-specific config.
+ self.assertTrue(CONF.identity.domain_specific_drivers_enabled)
+ self.load_backends()
+ # Execute any command to trigger the lazy loading of domain configs
+ self.identity_api.list_users(
+ domain_scope=self.domains['domain1']['id'])
+ # ...and now check the domain configs have been set up
+ self.assertIn('default', self.identity_api.domain_configs)
+ self.assertIn(self.domains['domain1']['id'],
+ self.identity_api.domain_configs)
+ self.assertIn(self.domains['domain2']['id'],
+ self.identity_api.domain_configs)
+ self.assertNotIn(self.domains['domain3']['id'],
+ self.identity_api.domain_configs)
+ self.assertNotIn(self.domains['domain4']['id'],
+ self.identity_api.domain_configs)
+
+ # Finally check that a domain specific config contains items from both
+ # the primary config and the domain specific config
+ conf = self.identity_api.domain_configs.get_domain_conf(
+ self.domains['domain1']['id'])
+ # This should now be false, as is the default, since this is not
+ # set in the standard primary config file
+ self.assertFalse(conf.identity.domain_specific_drivers_enabled)
+        # ...and make sure a domain-specific option is also set.
+ self.assertEqual('fake://memory1', conf.ldap.url)
+
+ def test_delete_domain_with_user_added(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'description': uuid.uuid4().hex,
+ 'parent_id': None,
+ 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ self.assignment_api.create_grant(user_id=self.user_foo['id'],
+ project_id=project['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.delete_grant(user_id=self.user_foo['id'],
+ project_id=project['id'],
+ role_id=self.role_member['id'])
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+ self.resource_api.delete_domain(domain['id'])
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ def test_user_enabled_ignored_disable_error(self):
+ # Override.
+        self.skipTest("Doesn't apply since LDAP config has no effect on the "
+ "SQL identity backend.")
+
+ def test_group_enabled_ignored_disable_error(self):
+ # Override.
+        self.skipTest("Doesn't apply since LDAP config has no effect on the "
+ "SQL identity backend.")
+
+ def test_project_enabled_ignored_disable_error(self):
+ # Override
+ self.skipTest("Doesn't apply since LDAP configuration is ignored for "
+ "SQL assignment backend.")
+
+
+class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
+ """Class to test the use of domain configs stored in the database.
+
+ Repeat the same tests as MultiLDAPandSQLIdentity, but instead of using the
+ domain specific config files, store the domain specific values in the
+ database.
+
+ """
+ def enable_multi_domain(self):
+ # The values below are the same as in the domain_configs_multi_ldap
+        # directory of the test config_files.
+ default_config = {
+ 'ldap': {'url': 'fake://memory',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=example,cn=com'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+ domain1_config = {
+ 'ldap': {'url': 'fake://memory1',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=example,cn=com'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+ domain2_config = {
+ 'ldap': {'url': 'fake://memory',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=myroot,cn=com',
+ 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
+ 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+
+ self.domain_config_api.create_config(CONF.identity.default_domain_id,
+ default_config)
+ self.domain_config_api.create_config(self.domains['domain1']['id'],
+ domain1_config)
+ self.domain_config_api.create_config(self.domains['domain2']['id'],
+ domain2_config)
+
+ self.config_fixture.config(
+ group='identity', domain_specific_drivers_enabled=True,
+ domain_configurations_from_database=True)
+ self.config_fixture.config(group='identity_mapping',
+ backward_compatible_ids=False)
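+        # (Illustrative note, assuming the domain config API mirrors the
+        # create calls above: the stored settings can be read back with
+        # something like
+        #     self.domain_config_api.get_config(self.domains['domain1']['id'])
+        # which should echo domain1_config.)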
+
+ def test_domain_config_has_no_impact_if_database_support_disabled(self):
+ """Ensure database domain configs have no effect if disabled.
+
+ Set reading from database configs to false, restart the backends
+ and then try and set and use database configs.
+
+ """
+ self.config_fixture.config(
+ group='identity', domain_configurations_from_database=False)
+ self.load_backends()
+ new_config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(
+ CONF.identity.default_domain_id, new_config)
+ # Trigger the identity backend to initialise any domain specific
+ # configurations
+ self.identity_api.list_users()
+ # Check that the new config has not been passed to the driver for
+ # the default domain.
+ default_config = (
+ self.identity_api.domain_configs.get_domain_conf(
+ CONF.identity.default_domain_id))
+ self.assertEqual(CONF.ldap.url, default_config.ldap.url)
+
+
+class DomainSpecificLDAPandSQLIdentity(
+ BaseLDAPIdentity, tests.SQLDriverOverrides, tests.TestCase,
+ BaseMultiLDAPandSQLIdentity):
+ """Class to test when all domains use specific configs, including SQL.
+
+ We define a set of domains and domain-specific backends:
+
+ - A separate LDAP backend for the default domain
+ - A separate SQL backend for domain1
+
+ Although the default driver still exists, we don't use it.
+
+ """
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(DomainSpecificLDAPandSQLIdentity, self).setUp()
+ self.initial_setup()
+
+ def initial_setup(self):
+ # We aren't setting up any initial data ahead of switching to
+ # domain-specific operation, so make the switch straight away.
+ self.config_fixture.config(
+ group='identity', domain_specific_drivers_enabled=True,
+ domain_config_dir=(
+ tests.TESTCONF + '/domain_configs_one_sql_one_ldap'))
+ self.config_fixture.config(group='identity_mapping',
+ backward_compatible_ids=False)
+
+ self.load_backends()
+
+ self.engine = sql.get_engine()
+ self.addCleanup(sql.cleanup)
+
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+
+ self.domain_count = 2
+ self.domain_specific_count = 2
+ self.setup_initial_domains()
+ self.users = {}
+
+ self.clear_database()
+ self.load_fixtures(default_fixtures)
+ self.create_users_across_domains()
+
+ def config_overrides(self):
+ super(DomainSpecificLDAPandSQLIdentity, self).config_overrides()
+ # Make sure resource & assignment are actually SQL drivers,
+ # BaseLDAPIdentity causes this option to use LDAP.
+ self.config_fixture.config(
+ group='resource',
+ driver='keystone.resource.backends.sql.Resource')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+
+ def reload_backends(self, domain_id):
+        # Just reload the driver for this domain - which will pick up
+        # any updated config.
+ self.identity_api.domain_configs.reload_domain_driver(domain_id)
+
+ def get_config(self, domain_id):
+ # Get the config for this domain, will return CONF
+ # if no specific config defined for this domain
+ return self.identity_api.domain_configs.get_domain_conf(domain_id)
+
+ def test_list_domains(self):
+ self.skipTest(
+ 'N/A: Not relevant for multi ldap testing')
+
+ def test_list_domains_non_default_domain_id(self):
+ self.skipTest(
+ 'N/A: Not relevant for multi ldap testing')
+
+ def test_domain_crud(self):
+ self.skipTest(
+ 'N/A: Not relevant for multi ldap testing')
+
+ def test_list_users(self):
+ # Override the standard list users, since we have added an extra user
+ # to the default domain, so the number of expected users is one more
+ # than in the standard test.
+ users = self.identity_api.list_users(
+ domain_scope=self._set_domain_scope(
+ CONF.identity.default_domain_id))
+ self.assertEqual(len(default_fixtures.USERS) + 1, len(users))
+ user_ids = set(user['id'] for user in users)
+ expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
+ for user in default_fixtures.USERS)
+ expected_user_ids.add(self.users['user0']['id'])
+ for user_ref in users:
+ self.assertNotIn('password', user_ref)
+ self.assertEqual(expected_user_ids, user_ids)
+
+ def test_domain_segregation(self):
+ """Test that separate configs have segregated the domain.
+
+ Test Plan:
+
+ - Users were created in each domain as part of setup, now make sure
+ you can only find a given user in its relevant domain/backend
+ - Make sure that for a backend that supports multiple domains
+ you can get the users via any of its domains
+
+ """
+ # Check that I can read a user with the appropriate domain-selected
+ # driver, but won't find it via any other domain driver
+
+ self.check_user(self.users['user0'],
+ self.domains['domain_default']['id'], 200)
+ self.check_user(self.users['user0'],
+ self.domains['domain1']['id'], exception.UserNotFound)
+
+ self.check_user(self.users['user1'],
+ self.domains['domain1']['id'], 200)
+ self.check_user(self.users['user1'],
+ self.domains['domain_default']['id'],
+ exception.UserNotFound)
+
+ # Finally, going through the regular manager layer, make sure we
+ # only see the right number of users in the non-default domain.
+
+ self.assertThat(
+ self.identity_api.list_users(
+ domain_scope=self.domains['domain1']['id']),
+ matchers.HasLength(1))
+
+ def test_add_role_grant_to_user_and_project_404(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_get_role_grants_for_user_and_project_404(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_list_projects_for_user_with_grants(self):
+ self.skipTest('Blocked by bug 1221805')
+
+ def test_get_roles_for_user_and_project_user_group_same_id(self):
+ self.skipTest('N/A: We never generate the same ID for a user and '
+ 'group in our mapping table')
+
+ def test_user_id_comma(self):
+ self.skipTest('Only valid if it is guaranteed to be talking to '
+ 'the fakeldap backend')
+
+ def test_user_id_comma_grants(self):
+ self.skipTest('Only valid if it is guaranteed to be talking to '
+ 'the fakeldap backend')
+
+ def test_user_enabled_ignored_disable_error(self):
+ # Override.
+ self.skipTest("Doesn't apply since LDAP config has no affect on the "
+ "SQL identity backend.")
+
+ def test_group_enabled_ignored_disable_error(self):
+ # Override.
+ self.skipTest("Doesn't apply since LDAP config has no affect on the "
+ "SQL identity backend.")
+
+ def test_project_enabled_ignored_disable_error(self):
+ # Override
+ self.skipTest("Doesn't apply since LDAP configuration is ignored for "
+ "SQL assignment backend.")
+
+
+class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
+ """Class to test simplest use of domain-specific SQL driver.
+
+ The simplest use of an SQL domain-specific backend is when it is used to
+ augment the standard case when LDAP is the default driver defined in the
+ main config file. This would allow, for example, service users to be
+ stored in SQL while LDAP handles the rest. Hence we define:
+
+ - The default driver uses the LDAP backend for the default domain
+ - A separate SQL backend for domain1
+
+ """
+ def initial_setup(self):
+ # We aren't setting up any initial data ahead of switching to
+ # domain-specific operation, so make the switch straight away.
+ self.config_fixture.config(
+ group='identity', domain_specific_drivers_enabled=True,
+ domain_config_dir=(
+ tests.TESTCONF + '/domain_configs_default_ldap_one_sql'))
+ # Part of the testing counts how many new mappings get created as
+ # we create users, so ensure we are NOT using mapping for the default
+ # LDAP domain so this doesn't confuse the calculation.
+ self.config_fixture.config(group='identity_mapping',
+ backward_compatible_ids=True)
+
+ self.load_backends()
+
+ self.engine = sql.get_engine()
+ self.addCleanup(sql.cleanup)
+
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+
+ self.domain_count = 2
+ self.domain_specific_count = 1
+ self.setup_initial_domains()
+ self.users = {}
+
+ self.load_fixtures(default_fixtures)
+ self.create_users_across_domains()
+
+ def config_overrides(self):
+ super(DomainSpecificSQLIdentity, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(
+ group='resource',
+ driver='keystone.resource.backends.sql.Resource')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+
+ def get_config(self, domain_id):
+ if domain_id == CONF.identity.default_domain_id:
+ return CONF
+ else:
+ return self.identity_api.domain_configs.get_domain_conf(domain_id)
+
+ def reload_backends(self, domain_id):
+ if domain_id == CONF.identity.default_domain_id:
+ self.load_backends()
+ else:
+            # Just reload the driver for this domain - which will pick up
+            # any updated config.
+ self.identity_api.domain_configs.reload_domain_driver(domain_id)
+
+ def test_default_sql_plus_sql_specific_driver_fails(self):
+ # First confirm that if ldap is default driver, domain1 can be
+ # loaded as sql
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+ self.load_backends()
+ # Make any identity call to initiate the lazy loading of configs
+ self.identity_api.list_users(
+ domain_scope=CONF.identity.default_domain_id)
+ self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
+
+ # Now re-initialize, but with sql as the default identity driver
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.sql.Identity')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+ self.load_backends()
+ # Make any identity call to initiate the lazy loading of configs, which
+ # should fail since we would now have two sql drivers.
+ self.assertRaises(exception.MultipleSQLDriversInConfig,
+ self.identity_api.list_users,
+ domain_scope=CONF.identity.default_domain_id)
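+        # (Rationale, as assumed here rather than asserted by the test: all
+        # SQL drivers share the single configured database, so a second SQL
+        # identity driver would silently serve the same tables; keystone
+        # therefore refuses the configuration outright.)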
+
+ def test_multiple_sql_specific_drivers_fails(self):
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.backends.sql.Assignment')
+ self.load_backends()
+ # Ensure default, domain1 and domain2 exist
+ self.domain_count = 3
+ self.setup_initial_domains()
+ # Make any identity call to initiate the lazy loading of configs
+ self.identity_api.list_users(
+ domain_scope=CONF.identity.default_domain_id)
+ # This will only load domain1, since the domain2 config file is
+ # not stored in the same location
+ self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
+
+ # Now try and manually load a 2nd sql specific driver, for domain2,
+ # which should fail.
+ self.assertRaises(
+ exception.MultipleSQLDriversInConfig,
+ self.identity_api.domain_configs._load_config_from_file,
+ self.resource_api,
+ [tests.TESTCONF + '/domain_configs_one_extra_sql/' +
+ 'keystone.domain2.conf'],
+ 'domain2')
+
+
+class LdapFilterTests(test_backend.FilterTests, tests.TestCase):
+
+ def setUp(self):
+ super(LdapFilterTests, self).setUp()
+ self.useFixture(database.Database())
+ self.clear_database()
+
+ common_ldap.register_handler('fake://', fakeldap.FakeLdap)
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.engine = sql.get_engine()
+ self.addCleanup(sql.cleanup)
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+
+ self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+ self.addCleanup(common_ldap_core._HANDLERS.clear)
+
+ def config_overrides(self):
+ super(LdapFilterTests, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def config_files(self):
+ config_files = super(LdapFilterTests, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ def clear_database(self):
+ for shelf in fakeldap.FakeShelves:
+ fakeldap.FakeShelves[shelf].clear()
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
new file mode 100644
index 00000000..eee03b8b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
@@ -0,0 +1,244 @@
+# -*- coding: utf-8 -*-
+# Copyright 2012 OpenStack Foundation
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ldappool
+import mock
+from oslo_config import cfg
+from oslotest import mockpatch
+
+from keystone.common.ldap import core as ldap_core
+from keystone.identity.backends import ldap
+from keystone.tests import unit as tests
+from keystone.tests.unit import fakeldap
+from keystone.tests.unit import test_backend_ldap
+
+CONF = cfg.CONF
+
+
+class LdapPoolCommonTestMixin(object):
+ """LDAP pool specific common tests used here and in live tests."""
+
+ def cleanup_pools(self):
+ ldap_core.PooledLDAPHandler.connection_pools.clear()
+
+ def test_handler_with_use_pool_enabled(self):
+        # By default, use_pool and use_auth_pool are enabled in the test
+        # pool config.
+ user_ref = self.identity_api.get_user(self.user_foo['id'])
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True)
+ self.assertIsInstance(handler, ldap_core.PooledLDAPHandler)
+
+ @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'connect')
+ @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
+ def test_handler_with_use_pool_not_enabled(self, bind_method,
+ connect_method):
+ self.config_fixture.config(group='ldap', use_pool=False)
+ self.config_fixture.config(group='ldap', use_auth_pool=True)
+ self.cleanup_pools()
+
+ user_api = ldap.UserApi(CONF)
+ handler = user_api.get_connection(user=None, password=None,
+ end_user_auth=True)
+        # The use_auth_pool flag does not matter when use_pool is False;
+        # the handler is still the non-pooled version.
+ self.assertIsInstance(handler.conn, ldap_core.PythonLDAPHandler)
+
+ @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'connect')
+ @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
+ def test_handler_with_end_user_auth_use_pool_not_enabled(self, bind_method,
+ connect_method):
+        # use_pool is enabled by default in the test pool config; now
+        # disable the use_auth_pool flag to test the handler instance.
+ self.config_fixture.config(group='ldap', use_auth_pool=False)
+ self.cleanup_pools()
+
+ user_api = ldap.UserApi(CONF)
+ handler = user_api.get_connection(user=None, password=None,
+ end_user_auth=True)
+ self.assertIsInstance(handler.conn, ldap_core.PythonLDAPHandler)
+
+        # The use_auth_pool flag only affects end-user authentication; when
+        # end_user_auth is False, the admin connection LDAP pool is used.
+ handler = user_api.get_connection(user=None, password=None,
+ end_user_auth=False)
+ self.assertIsInstance(handler.conn, ldap_core.PooledLDAPHandler)
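+        # Summary of the selection logic exercised by these three tests (a
+        # sketch, not the driver's literal code): a pooled handler is chosen
+        # when
+        #     use_pool and (use_auth_pool or not end_user_auth)
+        # and the plain PythonLDAPHandler otherwise.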
+
+ def test_pool_size_set(self):
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.pool_size, ldappool_cm.size)
+
+ def test_pool_retry_max_set(self):
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.pool_retry_max, ldappool_cm.retry_max)
+
+ def test_pool_retry_delay_set(self):
+        # Make one identity call to initiate the LDAP connection if it is
+        # not already there.
+ self.identity_api.get_user(self.user_foo['id'])
+
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.pool_retry_delay, ldappool_cm.retry_delay)
+
+ def test_pool_use_tls_set(self):
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.use_tls, ldappool_cm.use_tls)
+
+ def test_pool_timeout_set(self):
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.pool_connection_timeout,
+ ldappool_cm.timeout)
+
+ def test_pool_use_pool_set(self):
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.use_pool, ldappool_cm.use_pool)
+
+ def test_pool_connection_lifetime_set(self):
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ self.assertEqual(CONF.ldap.pool_connection_lifetime,
+ ldappool_cm.max_lifetime)
+
+ def test_max_connection_error_raised(self):
+
+ who = CONF.ldap.user
+ cred = CONF.ldap.password
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ ldappool_cm.size = 2
+
+ # 3rd connection attempt should raise Max connection error
+ with ldappool_cm.connection(who, cred) as _: # conn1
+ with ldappool_cm.connection(who, cred) as _: # conn2
+ try:
+ with ldappool_cm.connection(who, cred) as _: # conn3
+ _.unbind_s()
+ self.fail()
+ except Exception as ex:
+ self.assertIsInstance(ex,
+ ldappool.MaxConnectionReachedError)
+ ldappool_cm.size = CONF.ldap.pool_size
+
+ def test_pool_size_expands_correctly(self):
+
+ who = CONF.ldap.user
+ cred = CONF.ldap.password
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+ ldappool_cm.size = 3
+
+ def _get_conn():
+ return ldappool_cm.connection(who, cred)
+
+ # Open 3 connections first
+ with _get_conn() as _: # conn1
+ self.assertEqual(len(ldappool_cm), 1)
+ with _get_conn() as _: # conn2
+ self.assertEqual(len(ldappool_cm), 2)
+            with _get_conn() as _:  # conn3
+ _.unbind_ext_s()
+ self.assertEqual(len(ldappool_cm), 3)
+
+ # Then open 3 connections again and make sure size does not grow
+ # over 3
+ with _get_conn() as _: # conn1
+ self.assertEqual(len(ldappool_cm), 1)
+ with _get_conn() as _: # conn2
+ self.assertEqual(len(ldappool_cm), 2)
+ with _get_conn() as _: # conn3
+ _.unbind_ext_s()
+ self.assertEqual(len(ldappool_cm), 3)
+
+ def test_password_change_with_pool(self):
+ old_password = self.user_sna['password']
+ self.cleanup_pools()
+
+ # authenticate so that connection is added to pool before password
+ # change
+ user_ref = self.identity_api.authenticate(
+ context={},
+ user_id=self.user_sna['id'],
+ password=self.user_sna['password'])
+
+ self.user_sna.pop('password')
+ self.user_sna['enabled'] = True
+ self.assertDictEqual(user_ref, self.user_sna)
+
+ new_password = 'new_password'
+ user_ref['password'] = new_password
+ self.identity_api.update_user(user_ref['id'], user_ref)
+
+        # now authenticate again to make sure the new password works with
+        # the connection pool
+ user_ref2 = self.identity_api.authenticate(
+ context={},
+ user_id=self.user_sna['id'],
+ password=new_password)
+
+ user_ref.pop('password')
+ self.assertDictEqual(user_ref, user_ref2)
+
+        # Authentication with the old password will not work here, as the
+        # only connection in the pool is re-bound with the updated password,
+        # so no old bind remains.
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=self.user_sna['id'],
+ password=old_password)
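+        # (Sketch of the assumed pool behaviour: the pool re-binds a checked
+        # out connection with the credentials supplied on checkout, so the
+        # single pooled connection ends up bound with new_password and no
+        # connection bound with old_password survives.)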
+
+
+class LdapIdentitySqlAssignment(LdapPoolCommonTestMixin,
+ test_backend_ldap.LdapIdentitySqlAssignment,
+ tests.TestCase):
+    """Executes the existing base class's 150+ tests with the pooled LDAP
+    handler to make sure they pass without error.
+    """
+ def setUp(self):
+ self.useFixture(mockpatch.PatchObject(
+ ldap_core.PooledLDAPHandler, 'Connector', fakeldap.FakeLdapPool))
+ super(LdapIdentitySqlAssignment, self).setUp()
+
+ self.addCleanup(self.cleanup_pools)
+        # Store in a local variable to avoid long references.
+ self.conn_pools = ldap_core.PooledLDAPHandler.connection_pools
+        # The superclass loads DB fixtures, which establishes an LDAP
+        # connection, so add a dummy call to make the connection pool
+        # initialization explicit; it is not strictly needed here.
+ self.identity_api.get_user(self.user_foo['id'])
+
+ def config_files(self):
+ config_files = super(LdapIdentitySqlAssignment, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_ldap_pool.conf'))
+ return config_files
+
+ @mock.patch.object(ldap_core, 'utf8_encode')
+ def test_utf8_encoded_is_used_in_pool(self, mocked_method):
+ def side_effect(arg):
+ return arg
+ mocked_method.side_effect = side_effect
+ # invalidate the cache to get utf8_encode function called.
+ self.identity_api.get_user.invalidate(self.identity_api,
+ self.user_foo['id'])
+ self.identity_api.get_user(self.user_foo['id'])
+ mocked_method.assert_any_call(CONF.ldap.user)
+ mocked_method.assert_any_call(CONF.ldap.password)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_rules.py b/keystone-moon/keystone/tests/unit/test_backend_rules.py
new file mode 100644
index 00000000..c9c4f151
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_rules.py
@@ -0,0 +1,62 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_backend
+
+
+class RulesPolicy(tests.TestCase, test_backend.PolicyTests):
+ def setUp(self):
+ super(RulesPolicy, self).setUp()
+ self.load_backends()
+
+ def config_overrides(self):
+ super(RulesPolicy, self).config_overrides()
+ self.config_fixture.config(
+ group='policy',
+ driver='keystone.policy.backends.rules.Policy')
+
+ def test_create(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_create)
+
+ def test_get(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_get)
+
+ def test_list(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_list)
+
+ def test_update(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_update)
+
+ def test_delete(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_delete)
+
+ def test_get_policy_404(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_get_policy_404)
+
+ def test_update_policy_404(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_update_policy_404)
+
+ def test_delete_policy_404(self):
+ self.assertRaises(exception.NotImplemented,
+ super(RulesPolicy, self).test_delete_policy_404)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_sql.py b/keystone-moon/keystone/tests/unit/test_backend_sql.py
new file mode 100644
index 00000000..a7c63bf6
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_sql.py
@@ -0,0 +1,948 @@
+# -*- coding: utf-8 -*-
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+import uuid
+
+import mock
+from oslo_config import cfg
+from oslo_db import exception as db_exception
+from oslo_db import options
+import sqlalchemy
+from sqlalchemy import exc
+from testtools import matchers
+
+from keystone.common import driver_hints
+from keystone.common import sql
+from keystone import exception
+from keystone.identity.backends import sql as identity_sql
+from keystone.openstack.common import versionutils
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit import test_backend
+from keystone.token.persistence.backends import sql as token_sql
+
+
+CONF = cfg.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class SqlTests(tests.SQLDriverOverrides, tests.TestCase):
+
+ def setUp(self):
+ super(SqlTests, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+
+ # populate the engine with tables & fixtures
+ self.load_fixtures(default_fixtures)
+ # defaulted by the data load
+ self.user_foo['enabled'] = True
+
+ def config_files(self):
+ config_files = super(SqlTests, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+
+class SqlModels(SqlTests):
+
+ def select_table(self, name):
+ table = sqlalchemy.Table(name,
+ sql.ModelBase.metadata,
+ autoload=True)
+ s = sqlalchemy.select([table])
+ return s
+
+ def assertExpectedSchema(self, table, cols):
+ table = self.select_table(table)
+ for col, type_, length in cols:
+ self.assertIsInstance(table.c[col].type, type_)
+ if length:
+ self.assertEqual(length, table.c[col].type.length)
+
+ def test_user_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 255),
+ ('password', sql.String, 128),
+ ('domain_id', sql.String, 64),
+ ('enabled', sql.Boolean, None),
+ ('extra', sql.JsonBlob, None))
+ self.assertExpectedSchema('user', cols)
+
+ def test_group_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('description', sql.Text, None),
+ ('domain_id', sql.String, 64),
+ ('extra', sql.JsonBlob, None))
+ self.assertExpectedSchema('group', cols)
+
+ def test_domain_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('enabled', sql.Boolean, None))
+ self.assertExpectedSchema('domain', cols)
+
+ def test_project_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('description', sql.Text, None),
+ ('domain_id', sql.String, 64),
+ ('enabled', sql.Boolean, None),
+ ('extra', sql.JsonBlob, None),
+ ('parent_id', sql.String, 64))
+ self.assertExpectedSchema('project', cols)
+
+ def test_role_assignment_model(self):
+ cols = (('type', sql.Enum, None),
+ ('actor_id', sql.String, 64),
+ ('target_id', sql.String, 64),
+ ('role_id', sql.String, 64),
+ ('inherited', sql.Boolean, False))
+ self.assertExpectedSchema('assignment', cols)
+
+ def test_user_group_membership(self):
+ cols = (('group_id', sql.String, 64),
+ ('user_id', sql.String, 64))
+ self.assertExpectedSchema('user_group_membership', cols)
+
+
+class SqlIdentity(SqlTests, test_backend.IdentityTests):
+ def test_password_hashed(self):
+ session = sql.get_session()
+ user_ref = self.identity_api._get_user(session, self.user_foo['id'])
+ self.assertNotEqual(user_ref['password'], self.user_foo['password'])
+
+ def test_delete_user_with_project_association(self):
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_bar['id'],
+ user['id'])
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.list_projects_for_user,
+ user['id'])
+
+ def test_create_null_user_name(self):
+ user = {'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ user['name'],
+ DEFAULT_DOMAIN_ID)
+
+ def test_create_user_case_sensitivity(self):
+ # user name case sensitivity is down to the fact that it is marked as
+ # an SQL UNIQUE column, which may not be valid for other backends, like
+ # LDAP.
+
+ # create a ref with a lowercase name
+ ref = {
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ ref = self.identity_api.create_user(ref)
+
+ # assign a new ID with the same name, but this time in uppercase
+ ref['name'] = ref['name'].upper()
+ self.identity_api.create_user(ref)
+
+ def test_create_project_case_sensitivity(self):
+ # project name case sensitivity is down to the fact that it is marked
+ # as an SQL UNIQUE column, which may not be valid for other backends,
+ # like LDAP.
+
+ # create a ref with a lowercase name
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex.lower(),
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(ref['id'], ref)
+
+ # assign a new ID with the same name, but this time in uppercase
+ ref['id'] = uuid.uuid4().hex
+ ref['name'] = ref['name'].upper()
+ self.resource_api.create_project(ref['id'], ref)
+
+ def test_create_null_project_name(self):
+ tenant = {'id': uuid.uuid4().hex,
+ 'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ tenant['id'],
+ tenant)
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ tenant['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project_by_name,
+ tenant['name'],
+ DEFAULT_DOMAIN_ID)
+
+ def test_delete_project_with_user_association(self):
+ user = {'name': 'fakeuser',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'passwd'}
+ user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_bar['id'],
+ user['id'])
+ self.resource_api.delete_project(self.tenant_bar['id'])
+ tenants = self.assignment_api.list_projects_for_user(user['id'])
+ self.assertEqual([], tenants)
+
+ def test_metadata_removed_on_delete_user(self):
+        # A test to check that the internal representation
+        # of roles is correctly updated when a user is deleted
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'passwd'}
+ user = self.identity_api.create_user(user)
+ role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ self.assignment_api.add_role_to_user_and_project(
+ user['id'],
+ self.tenant_bar['id'],
+ role['id'])
+ self.identity_api.delete_user(user['id'])
+
+ # Now check whether the internal representation of roles
+ # has been deleted
+ self.assertRaises(exception.MetadataNotFound,
+ self.assignment_api._get_metadata,
+ user['id'],
+ self.tenant_bar['id'])
+
+ def test_metadata_removed_on_delete_project(self):
+        # A test to check that the internal representation
+        # of roles is correctly updated when a project is deleted
+ user = {'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'passwd'}
+ user = self.identity_api.create_user(user)
+ role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ self.assignment_api.add_role_to_user_and_project(
+ user['id'],
+ self.tenant_bar['id'],
+ role['id'])
+ self.resource_api.delete_project(self.tenant_bar['id'])
+
+ # Now check whether the internal representation of roles
+ # has been deleted
+ self.assertRaises(exception.MetadataNotFound,
+ self.assignment_api._get_metadata,
+ user['id'],
+ self.tenant_bar['id'])
+
+ def test_update_project_returns_extra(self):
+ """This tests for backwards-compatibility with an essex/folsom bug.
+
+ Non-indexed attributes were returned in an 'extra' attribute, instead
+ of on the entity itself; for consistency and backwards compatibility,
+ those attributes should be included twice.
+
+ This behavior is specific to the SQL driver.
+
+ """
+ tenant_id = uuid.uuid4().hex
+ arbitrary_key = uuid.uuid4().hex
+ arbitrary_value = uuid.uuid4().hex
+ tenant = {
+ 'id': tenant_id,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ arbitrary_key: arbitrary_value}
+ ref = self.resource_api.create_project(tenant_id, tenant)
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertIsNone(ref.get('extra'))
+
+ tenant['name'] = uuid.uuid4().hex
+ ref = self.resource_api.update_project(tenant_id, tenant)
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
+
+ def test_update_user_returns_extra(self):
+ """This tests for backwards-compatibility with an essex/folsom bug.
+
+ Non-indexed attributes were returned in an 'extra' attribute, instead
+ of on the entity itself; for consistency and backwards compatibility,
+ those attributes should be included twice.
+
+ This behavior is specific to the SQL driver.
+
+ """
+ arbitrary_key = uuid.uuid4().hex
+ arbitrary_value = uuid.uuid4().hex
+ user = {
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex,
+ arbitrary_key: arbitrary_value}
+ ref = self.identity_api.create_user(user)
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertIsNone(ref.get('password'))
+ self.assertIsNone(ref.get('extra'))
+
+ user['name'] = uuid.uuid4().hex
+ user['password'] = uuid.uuid4().hex
+ ref = self.identity_api.update_user(ref['id'], user)
+ self.assertIsNone(ref.get('password'))
+ self.assertIsNone(ref['extra'].get('password'))
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
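+        # For illustration, the updated ref then has roughly this shape
+        # (hypothetical key/value):
+        #     {'id': ..., 'name': ..., 'shoe_size': '12',
+        #      'extra': {'shoe_size': '12'}}
+        # i.e. the non-indexed attribute appears both top-level and under
+        # 'extra'.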
+
+ def test_sql_user_to_dict_null_default_project_id(self):
+ user = {
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+
+ user = self.identity_api.create_user(user)
+ session = sql.get_session()
+ query = session.query(identity_sql.User)
+ query = query.filter_by(id=user['id'])
+ raw_user_ref = query.one()
+ self.assertIsNone(raw_user_ref.default_project_id)
+ user_ref = raw_user_ref.to_dict()
+ self.assertNotIn('default_project_id', user_ref)
+ session.close()
+
+ def test_list_domains_for_user(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+
+ test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(test_domain1['id'], test_domain1)
+ test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(test_domain2['id'], test_domain2)
+
+ user = self.identity_api.create_user(user)
+ user_domains = self.assignment_api.list_domains_for_user(user['id'])
+ self.assertEqual(0, len(user_domains))
+ self.assignment_api.create_grant(user_id=user['id'],
+ domain_id=test_domain1['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(user_id=user['id'],
+ domain_id=test_domain2['id'],
+ role_id=self.role_member['id'])
+ user_domains = self.assignment_api.list_domains_for_user(user['id'])
+ self.assertThat(user_domains, matchers.HasLength(2))
+
+ def test_list_domains_for_user_with_grants(self):
+ # Create two groups each with a role on a different domain, and
+ # make user1 a member of both groups. Both these new domains
+ # should now be included, along with any direct user grants.
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain['id'], domain)
+ user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain['id'], 'enabled': True}
+ user = self.identity_api.create_user(user)
+ group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = self.identity_api.create_group(group1)
+ group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group2 = self.identity_api.create_group(group2)
+
+ test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(test_domain1['id'], test_domain1)
+ test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(test_domain2['id'], test_domain2)
+ test_domain3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(test_domain3['id'], test_domain3)
+
+ self.identity_api.add_user_to_group(user['id'], group1['id'])
+ self.identity_api.add_user_to_group(user['id'], group2['id'])
+
+ # Create 3 grants, one user grant, the other two as group grants
+ self.assignment_api.create_grant(user_id=user['id'],
+ domain_id=test_domain1['id'],
+ role_id=self.role_member['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=test_domain2['id'],
+ role_id=self.role_admin['id'])
+ self.assignment_api.create_grant(group_id=group2['id'],
+ domain_id=test_domain3['id'],
+ role_id=self.role_admin['id'])
+ user_domains = self.assignment_api.list_domains_for_user(user['id'])
+ self.assertThat(user_domains, matchers.HasLength(3))
+
+ def test_list_domains_for_user_with_inherited_grants(self):
+ """Test that inherited roles on the domain are excluded.
+
+ Test Plan:
+
+ - Create two domains, one user, group and role
+ - Domain1 is given an inherited user role, Domain2 an inherited
+ group role (for a group of which the user is a member)
+ - When listing domains for user, neither domain should be returned
+
+ """
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain1 = self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain2 = self.resource_api.create_domain(domain2['id'], domain2)
+ user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ user = self.identity_api.create_user(user)
+ group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
+ group = self.identity_api.create_group(group)
+ self.identity_api.add_user_to_group(user['id'], group['id'])
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+
+ # Create a grant on each domain, one user grant, one group grant,
+ # both inherited.
+ self.assignment_api.create_grant(user_id=user['id'],
+ domain_id=domain1['id'],
+ role_id=role['id'],
+ inherited_to_projects=True)
+ self.assignment_api.create_grant(group_id=group['id'],
+ domain_id=domain2['id'],
+ role_id=role['id'],
+ inherited_to_projects=True)
+
+ user_domains = self.assignment_api.list_domains_for_user(user['id'])
+ # No domains should be returned since both domains have only inherited
+ # roles assignments.
+ self.assertThat(user_domains, matchers.HasLength(0))
+
+
+class SqlTrust(SqlTests, test_backend.TrustTests):
+ pass
+
+
+class SqlToken(SqlTests, test_backend.TokenTests):
+ def test_token_revocation_list_uses_right_columns(self):
+ # This query used to be heavy with too many columns. We want
+ # to make sure it is only running with the minimum columns
+ # necessary.
+
+ expected_query_args = (token_sql.TokenModel.id,
+ token_sql.TokenModel.expires)
+
+ with mock.patch.object(token_sql, 'sql') as mock_sql:
+ tok = token_sql.Token()
+ tok.list_revoked_tokens()
+
+ mock_query = mock_sql.get_session().query
+ mock_query.assert_called_with(*expected_query_args)
+
+ def test_flush_expired_tokens_batch(self):
+ # TODO(dstanek): This test should be rewritten to be less
+ # brittle. The code will likely need to be changed first. I
+ # just copied the spirit of the existing test when I rewrote
+ # mox -> mock. These tests are brittle because they have the
+ # call structure for SQLAlchemy encoded in them.
+
+ # test sqlite dialect
+ with mock.patch.object(token_sql, 'sql') as mock_sql:
+ mock_sql.get_session().bind.dialect.name = 'sqlite'
+ tok = token_sql.Token()
+ tok.flush_expired_tokens()
+
+ filter_mock = mock_sql.get_session().query().filter()
+ self.assertFalse(filter_mock.limit.called)
+            # Note: mock has no 'called_once' attribute (it would always be
+            # truthy), so check the call count explicitly.
+            self.assertEqual(1, filter_mock.delete.call_count)
+
+ def test_flush_expired_tokens_batch_mysql(self):
+        # Test the mysql dialect; we don't need to test IBM DB SA separately,
+        # since other tests below cover the differences in how they use the
+        # batch strategy.
+ with mock.patch.object(token_sql, 'sql') as mock_sql:
+ mock_sql.get_session().query().filter().delete.return_value = 0
+ mock_sql.get_session().bind.dialect.name = 'mysql'
+ tok = token_sql.Token()
+ expiry_mock = mock.Mock()
+ ITERS = [1, 2, 3]
+ expiry_mock.return_value = iter(ITERS)
+ token_sql._expiry_range_batched = expiry_mock
+ tok.flush_expired_tokens()
+
+ # The expiry strategy is only invoked once, the other calls are via
+ # the yield return.
+ self.assertEqual(1, expiry_mock.call_count)
+ mock_delete = mock_sql.get_session().query().filter().delete
+ self.assertThat(mock_delete.call_args_list,
+ matchers.HasLength(len(ITERS)))
+
+ def test_expiry_range_batched(self):
+ upper_bound_mock = mock.Mock(side_effect=[1, "final value"])
+ sess_mock = mock.Mock()
+ query_mock = sess_mock.query().filter().order_by().offset().limit()
+ query_mock.one.side_effect = [['test'], sql.NotFound()]
+ for i, x in enumerate(token_sql._expiry_range_batched(sess_mock,
+ upper_bound_mock,
+ batch_size=50)):
+ if i == 0:
+ # The first time the batch iterator returns, it should return
+ # the first result that comes back from the database.
+ self.assertEqual(x, 'test')
+ elif i == 1:
+ # The second time, the database range function should return
+ # nothing, so the batch iterator returns the result of the
+ # upper_bound function
+ self.assertEqual(x, "final value")
+ else:
+ self.fail("range batch function returned more than twice")
+
+ def test_expiry_range_strategy_sqlite(self):
+ tok = token_sql.Token()
+ sqlite_strategy = tok._expiry_range_strategy('sqlite')
+ self.assertEqual(token_sql._expiry_range_all, sqlite_strategy)
+
+ def test_expiry_range_strategy_ibm_db_sa(self):
+ tok = token_sql.Token()
+ db2_strategy = tok._expiry_range_strategy('ibm_db_sa')
+ self.assertIsInstance(db2_strategy, functools.partial)
+ self.assertEqual(db2_strategy.func, token_sql._expiry_range_batched)
+ self.assertEqual(db2_strategy.keywords, {'batch_size': 100})
+
+ def test_expiry_range_strategy_mysql(self):
+ tok = token_sql.Token()
+ mysql_strategy = tok._expiry_range_strategy('mysql')
+ self.assertIsInstance(mysql_strategy, functools.partial)
+ self.assertEqual(mysql_strategy.func, token_sql._expiry_range_batched)
+ self.assertEqual(mysql_strategy.keywords, {'batch_size': 1000})
+
+
+class SqlCatalog(SqlTests, test_backend.CatalogTests):
+
+ _legacy_endpoint_id_in_endpoint = True
+ _enabled_default_to_true_when_creating_endpoint = True
+
+ def test_catalog_ignored_malformed_urls(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ malformed_url = "http://192.168.1.104:8774/v2/$(tenant)s"
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': None,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': malformed_url,
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ # NOTE(dstanek): there are no valid URLs, so nothing is in the catalog
+ catalog = self.catalog_api.get_catalog('fake-user', 'fake-tenant')
+ self.assertEqual({}, catalog)
+
+ def test_get_catalog_with_empty_public_url(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': None,
+ 'interface': 'public',
+ 'url': '',
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ catalog = self.catalog_api.get_catalog('user', 'tenant')
+ catalog_endpoint = catalog[endpoint['region_id']][service['type']]
+ self.assertEqual(service['name'], catalog_endpoint['name'])
+ self.assertEqual(endpoint['id'], catalog_endpoint['id'])
+ self.assertEqual('', catalog_endpoint['publicURL'])
+ self.assertIsNone(catalog_endpoint.get('adminURL'))
+ self.assertIsNone(catalog_endpoint.get('internalURL'))
+
+ def test_create_endpoint_region_404(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ }
+
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.create_endpoint,
+ endpoint['id'],
+ endpoint.copy())
+
+ def test_create_region_invalid_id(self):
+ region = {
+ 'id': '0' * 256,
+ 'description': '',
+ 'extra': {},
+ }
+
+ self.assertRaises(exception.StringLengthExceeded,
+ self.catalog_api.create_region,
+ region.copy())
+
+ def test_create_region_invalid_parent_id(self):
+ region = {
+ 'id': uuid.uuid4().hex,
+ 'parent_region_id': '0' * 256,
+ }
+
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.create_region,
+ region)
+
+ def test_delete_region_with_endpoint(self):
+ # create a region
+ region = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_region(region)
+
+ # create a child region
+ child_region = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'parent_id': region['id']
+ }
+ self.catalog_api.create_region(child_region)
+ # create a service
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service and child region
+ child_endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': child_region['id'],
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(child_endpoint['id'], child_endpoint)
+ self.assertRaises(exception.RegionDeletionError,
+ self.catalog_api.delete_region,
+ child_region['id'])
+
+ # create an endpoint attached to the service and parent region
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region_id': region['id'],
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.assertRaises(exception.RegionDeletionError,
+ self.catalog_api.delete_region,
+ region['id'])
+
+
+class SqlPolicy(SqlTests, test_backend.PolicyTests):
+ pass
+
+
+class SqlInheritance(SqlTests, test_backend.InheritanceTests):
+ pass
+
+
+class SqlTokenCacheInvalidation(SqlTests, test_backend.TokenCacheInvalidation):
+ def setUp(self):
+ super(SqlTokenCacheInvalidation, self).setUp()
+ self._create_test_data()
+
+
+class SqlFilterTests(SqlTests, test_backend.FilterTests):
+
+ def clean_up_entities(self):
+ """Clean up entity test data from Filter Test Cases."""
+
+ for entity in ['user', 'group', 'project']:
+ self._delete_test_data(entity, self.entity_list[entity])
+ self._delete_test_data(entity, self.domain1_entity_list[entity])
+ del self.entity_list
+ del self.domain1_entity_list
+ self.domain1['enabled'] = False
+ self.resource_api.update_domain(self.domain1['id'], self.domain1)
+ self.resource_api.delete_domain(self.domain1['id'])
+ del self.domain1
+
+ def test_list_entities_filtered_by_domain(self):
+ # NOTE(henry-nash): This method is here rather than in test_backend
+ # since any domain filtering with LDAP is handled by the manager
+ # layer (and is already tested elsewhere) not at the driver level.
+ self.addCleanup(self.clean_up_entities)
+ self.domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(self.domain1['id'], self.domain1)
+
+ self.entity_list = {}
+ self.domain1_entity_list = {}
+ for entity in ['user', 'group', 'project']:
+ # Create 5 entities, 3 of which are in domain1
+ DOMAIN1_ENTITIES = 3
+ self.entity_list[entity] = self._create_test_data(entity, 2)
+ self.domain1_entity_list[entity] = self._create_test_data(
+ entity, DOMAIN1_ENTITIES, self.domain1['id'])
+
+ # Should get back the DOMAIN1_ENTITIES in domain1
+ hints = driver_hints.Hints()
+ hints.add_filter('domain_id', self.domain1['id'])
+ entities = self._list_entities(entity)(hints=hints)
+ self.assertEqual(DOMAIN1_ENTITIES, len(entities))
+ self._match_with_list(entities, self.domain1_entity_list[entity])
+ # Check the driver has removed the filter from the list hints
+ self.assertFalse(hints.get_exact_filter_by_name('domain_id'))
+
+ def test_filter_sql_injection_attack(self):
+ """Test against sql injection attack on filters
+
+ Test Plan:
+ - Attempt to get all entities back by passing a two-term attribute
+ - Attempt to piggyback filter to damage DB (e.g. drop table)
+
+ """
+ # Check we have some users
+ users = self.identity_api.list_users()
+ self.assertTrue(len(users) > 0)
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', "anything' or 'x'='x")
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(0, len(users))
+
+ # See if we can add a SQL command...use the group table instead of the
+        # user table since 'user' is a reserved word for SQLAlchemy.
+ group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
+ group = self.identity_api.create_group(group)
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', "x'; drop table group")
+ groups = self.identity_api.list_groups(hints=hints)
+ self.assertEqual(0, len(groups))
+
+ groups = self.identity_api.list_groups()
+ self.assertTrue(len(groups) > 0)
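+        # (Why this is safe, as a sketch: assuming the driver applies exact
+        # filters via SQLAlchemy expressions along the lines of
+        #     query = query.filter_by(name=filter_value)
+        # the filter value travels as a bound parameter and is never
+        # interpolated into the SQL text.)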
+
+ def test_groups_for_user_filtered(self):
+ # The SQL identity driver currently does not support filtering on the
+ # listing groups for a given user, so will fail this test. This is
+ # raised as bug #1412447.
+ try:
+ super(SqlFilterTests, self).test_groups_for_user_filtered()
+ except matchers.MismatchError:
+ return
+        # We shouldn't get here; if we do, it means someone has fixed the
+        # above defect and this test override can be removed. As an aside,
+        # it would have been nice to use self.assertRaises() around the call
+        # above to achieve this logic, but that does not seem to work when
+        # wrapping another assert (it does not catch the error).
+        self.fail('test_groups_for_user_filtered appears to have been fixed; '
+                  'remove this override')
+
+
+class SqlLimitTests(SqlTests, test_backend.LimitTests):
+ def setUp(self):
+ super(SqlLimitTests, self).setUp()
+ test_backend.LimitTests.setUp(self)
+
+
+class FakeTable(sql.ModelBase):
+ __tablename__ = 'test_table'
+ col = sql.Column(sql.String(32), primary_key=True)
+
+ @sql.handle_conflicts('keystone')
+ def insert(self):
+ raise db_exception.DBDuplicateEntry
+
+ @sql.handle_conflicts('keystone')
+ def update(self):
+ raise db_exception.DBError(
+ inner_exception=exc.IntegrityError('a', 'a', 'a'))
+
+ @sql.handle_conflicts('keystone')
+ def lookup(self):
+ raise KeyError
+
+
+class SqlDecorators(tests.TestCase):
+
+ def test_initialization_fail(self):
+ self.assertRaises(exception.StringLengthExceeded,
+ FakeTable, col='a' * 64)
+
+ def test_initialization(self):
+ tt = FakeTable(col='a')
+ self.assertEqual('a', tt.col)
+
+ def test_non_ascii_init(self):
+ # NOTE(I159): Non ASCII characters must cause UnicodeDecodeError
+ # if encoding is not provided explicitly.
+ self.assertRaises(UnicodeDecodeError, FakeTable, col='Я')
+
+ def test_conflict_happend(self):
+ self.assertRaises(exception.Conflict, FakeTable().insert)
+ self.assertRaises(exception.UnexpectedError, FakeTable().update)
+
+ def test_not_conflict_error(self):
+ self.assertRaises(KeyError, FakeTable().lookup)
+
+
+class SqlModuleInitialization(tests.TestCase):
+
+ @mock.patch.object(sql.core, 'CONF')
+ @mock.patch.object(options, 'set_defaults')
+ def test_initialize_module(self, set_defaults, CONF):
+ sql.initialize()
+ set_defaults.assert_called_with(CONF,
+ connection='sqlite:///keystone.db')
+
+
+class SqlCredential(SqlTests):
+
+    def _create_credential_with_user_id(self, user_id=None):
+        # A default argument is evaluated once at definition time, so avoid
+        # uuid.uuid4().hex as the default; generate a fresh id per call.
+        user_id = user_id or uuid.uuid4().hex
+ credential_id = uuid.uuid4().hex
+ new_credential = {
+ 'id': credential_id,
+ 'user_id': user_id,
+ 'project_id': uuid.uuid4().hex,
+ 'blob': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'extra': uuid.uuid4().hex
+ }
+ self.credential_api.create_credential(credential_id, new_credential)
+ return new_credential
+
+ def _validateCredentialList(self, retrieved_credentials,
+ expected_credentials):
+ self.assertEqual(len(retrieved_credentials), len(expected_credentials))
+        retrieved_ids = [c['id'] for c in retrieved_credentials]
+        for cred in expected_credentials:
+            self.assertIn(cred['id'], retrieved_ids)
+
+ def setUp(self):
+ super(SqlCredential, self).setUp()
+ self.credentials = []
+ for _ in range(3):
+ self.credentials.append(
+ self._create_credential_with_user_id())
+ self.user_credentials = []
+ for _ in range(3):
+ cred = self._create_credential_with_user_id(self.user_foo['id'])
+ self.user_credentials.append(cred)
+ self.credentials.append(cred)
+
+ def test_list_credentials(self):
+ credentials = self.credential_api.list_credentials()
+ self._validateCredentialList(credentials, self.credentials)
+ # test filtering using hints
+ hints = driver_hints.Hints()
+ hints.add_filter('user_id', self.user_foo['id'])
+ credentials = self.credential_api.list_credentials(hints)
+ self._validateCredentialList(credentials, self.user_credentials)
+
+ def test_list_credentials_for_user(self):
+ credentials = self.credential_api.list_credentials_for_user(
+ self.user_foo['id'])
+ self._validateCredentialList(credentials, self.user_credentials)
+
+
+class DeprecatedDecorators(SqlTests):
+
+ def test_assignment_to_role_api(self):
+ """Test that calling one of the methods does call LOG.deprecated.
+
+ This method is really generic to the type of backend, but we need
+ one to execute the test, so the SQL backend is as good as any.
+
+ """
+
+ # Rather than try and check that a log message is issued, we
+ # enable fatal_deprecations so that we can check for the
+ # raising of the exception.
+
+ # First try to create a role without enabling fatal deprecations,
+ # which should work due to the cross manager deprecated calls.
+ role_ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role_ref['id'], role_ref)
+ self.role_api.get_role(role_ref['id'])
+
+ # Now enable fatal exceptions - creating a role by calling the
+ # old manager should now fail.
+ self.config_fixture.config(fatal_deprecations=True)
+ role_ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.assertRaises(versionutils.DeprecatedConfig,
+ self.assignment_api.create_role,
+ role_ref['id'], role_ref)
+
+ def test_assignment_to_resource_api(self):
+        """Test that calling one of the methods does call LOG.deprecated.
+
+        This test is generic with respect to the backend type, but we need
+        one to execute it, so the SQL backend is as good as any.
+
+        """
+
+        # Rather than try to check that a log message is issued, we enable
+        # fatal_deprecations so that we can check that the exception is
+        # raised.
+
+ # First try to create a project without enabling fatal deprecations,
+ # which should work due to the cross manager deprecated calls.
+ project_ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.get_project(project_ref['id'])
+
+ # Now enable fatal exceptions - creating a project by calling the
+ # old manager should now fail.
+ self.config_fixture.config(fatal_deprecations=True)
+ project_ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(versionutils.DeprecatedConfig,
+ self.assignment_api.create_project,
+ project_ref['id'], project_ref)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_templated.py b/keystone-moon/keystone/tests/unit/test_backend_templated.py
new file mode 100644
index 00000000..a1c15fb1
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_backend_templated.py
@@ -0,0 +1,127 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import uuid
+
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit import test_backend
+
+
+DEFAULT_CATALOG_TEMPLATES = os.path.abspath(os.path.join(
+ os.path.dirname(__file__),
+ 'default_catalog.templates'))
+
+
+class TestTemplatedCatalog(tests.TestCase, test_backend.CatalogTests):
+
+ DEFAULT_FIXTURE = {
+ 'RegionOne': {
+ 'compute': {
+ 'adminURL': 'http://localhost:8774/v1.1/bar',
+ 'publicURL': 'http://localhost:8774/v1.1/bar',
+ 'internalURL': 'http://localhost:8774/v1.1/bar',
+ 'name': "'Compute Service'",
+ 'id': '2'
+ },
+ 'identity': {
+ 'adminURL': 'http://localhost:35357/v2.0',
+ 'publicURL': 'http://localhost:5000/v2.0',
+ 'internalURL': 'http://localhost:35357/v2.0',
+ 'name': "'Identity Service'",
+ 'id': '1'
+ }
+ }
+ }
+
+ def setUp(self):
+ super(TestTemplatedCatalog, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ def config_overrides(self):
+ super(TestTemplatedCatalog, self).config_overrides()
+ self.config_fixture.config(group='catalog',
+ template_file=DEFAULT_CATALOG_TEMPLATES)
+
+ def test_get_catalog(self):
+ catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
+ self.assertDictEqual(catalog_ref, self.DEFAULT_FIXTURE)
+
+ def test_catalog_ignored_malformed_urls(self):
+ # both endpoints are in the catalog
+ catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
+ self.assertEqual(2, len(catalog_ref['RegionOne']))
+
+ region = self.catalog_api.driver.templates['RegionOne']
+ region['compute']['adminURL'] = 'http://localhost:8774/v1.1/$(tenant)s'
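+        # $(tenant)s does not correspond to any substitution key (the
+        # templated catalog expects e.g. $(tenant_id)s), so formatting the
+        # URL fails and the endpoint is dropped from the catalog.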
+
+ # the malformed one has been removed
+ catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
+ self.assertEqual(1, len(catalog_ref['RegionOne']))
+
+ def test_get_catalog_endpoint_disabled(self):
+ self.skipTest("Templated backend doesn't have disabled endpoints")
+
+ def test_get_v3_catalog_endpoint_disabled(self):
+ self.skipTest("Templated backend doesn't have disabled endpoints")
+
+ def assert_catalogs_equal(self, expected, observed):
+ for e, o in zip(sorted(expected), sorted(observed)):
+ expected_endpoints = e.pop('endpoints')
+ observed_endpoints = o.pop('endpoints')
+ self.assertDictEqual(e, o)
+ self.assertItemsEqual(expected_endpoints, observed_endpoints)
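+    # NOTE: sorting the catalogs and comparing endpoints with
+    # assertItemsEqual treats ordering as unspecified, since the templated
+    # backend does not guarantee a stable order.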
+
+ def test_get_v3_catalog(self):
+ user_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+ catalog_ref = self.catalog_api.get_v3_catalog(user_id, project_id)
+ exp_catalog = [
+ {'endpoints': [
+ {'interface': 'admin',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:8774/v1.1/%s' % project_id},
+ {'interface': 'public',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:8774/v1.1/%s' % project_id},
+ {'interface': 'internal',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:8774/v1.1/%s' % project_id}],
+ 'type': 'compute',
+ 'name': "'Compute Service'",
+ 'id': '2'},
+ {'endpoints': [
+ {'interface': 'admin',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:35357/v2.0'},
+ {'interface': 'public',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:5000/v2.0'},
+ {'interface': 'internal',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:35357/v2.0'}],
+ 'type': 'identity',
+ 'name': "'Identity Service'",
+ 'id': '1'}]
+ self.assert_catalogs_equal(exp_catalog, catalog_ref)
+
+ def test_list_regions_filtered_by_parent_region_id(self):
+ self.skipTest('Templated backend does not support hints')
+
+ def test_service_filtering(self):
+ self.skipTest("Templated backend doesn't support filtering")
diff --git a/keystone-moon/keystone/tests/unit/test_cache.py b/keystone-moon/keystone/tests/unit/test_cache.py
new file mode 100644
index 00000000..5a778a07
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_cache.py
@@ -0,0 +1,322 @@
+# Copyright 2013 Metacloud
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import time
+import uuid
+
+from dogpile.cache import api
+from dogpile.cache import proxy
+import mock
+from oslo_config import cfg
+
+from keystone.common import cache
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+CONF = cfg.CONF
+NO_VALUE = api.NO_VALUE
+
+
+def _copy_value(value):
+ if value is not NO_VALUE:
+ value = copy.deepcopy(value)
+ return value
+
+
+# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory
+# backend for dogpile.cache in a real deployment under any circumstances. The
+# backend does no cleanup of expired values and therefore will leak memory.
+# The backend is not implemented in a way to share data across processes
+# (e.g. Keystone in HTTPD). This proxy is a hack to get around the lack of
+# isolation of values in memory. Currently it blindly stores and retrieves
+# the values from the cache, and modifications to dicts/lists/etc. returned
+# can result in changes to the cached values. In short, do not use the
+# dogpile.cache.memory backend unless you are running tests or expecting
+# odd/strange results.
+class CacheIsolatingProxy(proxy.ProxyBackend):
+    """Proxy that forces a memory copy of stored values.
+
+    The default in-memory cache-region does not perform a copy on values it
+    is meant to cache. Therefore if the value is modified after set or after
+    get, the cached value also is modified. This proxy does a copy as the
+    last thing before storing data.
+    """
+ def get(self, key):
+ return _copy_value(self.proxied.get(key))
+
+ def set(self, key, value):
+ self.proxied.set(key, _copy_value(value))
+
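+# NOTE: a hypothetical illustration (not executed by these tests) of the
+# hazard the proxy guards against when the plain memory backend is used:
+#
+#     region.set('key', {'a': 1})
+#     value = region.get('key')
+#     value['a'] = 2   # also mutates the cached entry
+#
+# With CacheIsolatingProxy in place, value is a copy and the cached entry
+# stays untouched.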
+
+class TestProxy(proxy.ProxyBackend):
+ def get(self, key):
+ value = _copy_value(self.proxied.get(key))
+ if value is not NO_VALUE:
+ if isinstance(value[0], TestProxyValue):
+ value[0].cached = True
+ return value
+
+
+class TestProxyValue(object):
+ def __init__(self, value):
+ self.value = value
+ self.cached = False
+
+
+class CacheRegionTest(tests.TestCase):
+
+ def setUp(self):
+ super(CacheRegionTest, self).setUp()
+ self.region = cache.make_region()
+ cache.configure_cache_region(self.region)
+ self.region.wrap(TestProxy)
+ self.test_value = TestProxyValue('Decorator Test')
+
+ def _add_test_caching_option(self):
+ self.config_fixture.register_opt(
+ cfg.BoolOpt('caching', default=True), group='cache')
+
+ def _get_cacheable_function(self):
+ with mock.patch.object(cache.REGION, 'cache_on_arguments',
+ self.region.cache_on_arguments):
+ memoize = cache.get_memoization_decorator(section='cache')
+
+ @memoize
+ def cacheable_function(value):
+ return value
+
+ return cacheable_function
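+    # NOTE: get_memoization_decorator wires dogpile's cache_on_arguments up
+    # to keystone configuration: the named section's `caching` flag feeds
+    # should_cache_fn and its `cache_time` feeds get_expiration_time_fn;
+    # the should_cache_fn tests below exercise exactly that wiring.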
+
+ def test_region_built_with_proxy_direct_cache_test(self):
+ # Verify cache regions are properly built with proxies.
+ test_value = TestProxyValue('Direct Cache Test')
+ self.region.set('cache_test', test_value)
+ cached_value = self.region.get('cache_test')
+ self.assertTrue(cached_value.cached)
+
+ def test_cache_region_no_error_multiple_config(self):
+ # Verify configuring the CacheRegion again doesn't error.
+ cache.configure_cache_region(self.region)
+ cache.configure_cache_region(self.region)
+
+ def _get_cache_fallthrough_fn(self, cache_time):
+ with mock.patch.object(cache.REGION, 'cache_on_arguments',
+ self.region.cache_on_arguments):
+ memoize = cache.get_memoization_decorator(
+ section='cache',
+ expiration_section='assignment')
+
+ class _test_obj(object):
+ def __init__(self, value):
+ self.test_value = value
+
+ @memoize
+ def get_test_value(self):
+ return self.test_value
+
+ def _do_test(value):
+
+ test_obj = _test_obj(value)
+
+ # Ensure the value has been cached
+ test_obj.get_test_value()
+ # Get the now cached value
+ cached_value = test_obj.get_test_value()
+ self.assertTrue(cached_value.cached)
+ self.assertEqual(value.value, cached_value.value)
+ self.assertEqual(cached_value.value, test_obj.test_value.value)
+ # Change the underlying value on the test object.
+ test_obj.test_value = TestProxyValue(uuid.uuid4().hex)
+ self.assertEqual(cached_value.value,
+ test_obj.get_test_value().value)
+            # Override the system time to ensure the non-cached new value
+            # is returned.
+            new_time = time.time() + (cache_time * 2)
+            with mock.patch.object(time, 'time',
+                                   return_value=new_time):
+                overridden_cache_value = test_obj.get_test_value()
+            self.assertNotEqual(cached_value.value,
+                                overridden_cache_value.value)
+            self.assertEqual(test_obj.test_value.value,
+                             overridden_cache_value.value)
+
+ return _do_test
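+    # NOTE: "fallthrough" means that when the subsystem section (here
+    # [role]) leaves cache_time unset, get_expiration_time_fn returns None
+    # and dogpile falls back to the global [cache] expiration_time.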
+
+ def test_cache_no_fallthrough_expiration_time_fn(self):
+ # Since we do not re-configure the cache region, for ease of testing
+ # this value is set the same as the expiration_time default in the
+ # [cache] section
+ cache_time = 600
+ expiration_time = cache.get_expiration_time_fn('role')
+ do_test = self._get_cache_fallthrough_fn(cache_time)
+ # Run the test with the assignment cache_time value
+ self.config_fixture.config(cache_time=cache_time,
+ group='role')
+ test_value = TestProxyValue(uuid.uuid4().hex)
+ self.assertEqual(cache_time, expiration_time())
+ do_test(value=test_value)
+
+ def test_cache_fallthrough_expiration_time_fn(self):
+        # Since we do not re-configure the cache region, for ease of testing
+        # this value is deliberately set just below the expiration_time
+        # default in the [cache] section so the fallthrough is observable.
+        cache_time = 599
+ expiration_time = cache.get_expiration_time_fn('role')
+ do_test = self._get_cache_fallthrough_fn(cache_time)
+ # Run the test with the assignment cache_time value set to None and
+ # the global value set.
+ self.config_fixture.config(cache_time=None, group='role')
+ test_value = TestProxyValue(uuid.uuid4().hex)
+ self.assertIsNone(expiration_time())
+ do_test(value=test_value)
+
+ def test_should_cache_fn_global_cache_enabled(self):
+ # Verify should_cache_fn generates a sane function for subsystem and
+ # functions as expected with caching globally enabled.
+ cacheable_function = self._get_cacheable_function()
+
+ self.config_fixture.config(group='cache', enabled=True)
+ cacheable_function(self.test_value)
+ cached_value = cacheable_function(self.test_value)
+ self.assertTrue(cached_value.cached)
+
+ def test_should_cache_fn_global_cache_disabled(self):
+ # Verify should_cache_fn generates a sane function for subsystem and
+ # functions as expected with caching globally disabled.
+ cacheable_function = self._get_cacheable_function()
+
+ self.config_fixture.config(group='cache', enabled=False)
+ cacheable_function(self.test_value)
+ cached_value = cacheable_function(self.test_value)
+ self.assertFalse(cached_value.cached)
+
+ def test_should_cache_fn_global_cache_disabled_section_cache_enabled(self):
+ # Verify should_cache_fn generates a sane function for subsystem and
+ # functions as expected with caching globally disabled and the specific
+ # section caching enabled.
+ cacheable_function = self._get_cacheable_function()
+
+ self._add_test_caching_option()
+ self.config_fixture.config(group='cache', enabled=False)
+ self.config_fixture.config(group='cache', caching=True)
+
+ cacheable_function(self.test_value)
+ cached_value = cacheable_function(self.test_value)
+ self.assertFalse(cached_value.cached)
+
+ def test_should_cache_fn_global_cache_enabled_section_cache_disabled(self):
+ # Verify should_cache_fn generates a sane function for subsystem and
+ # functions as expected with caching globally enabled and the specific
+ # section caching disabled.
+ cacheable_function = self._get_cacheable_function()
+
+ self._add_test_caching_option()
+ self.config_fixture.config(group='cache', enabled=True)
+ self.config_fixture.config(group='cache', caching=False)
+
+ cacheable_function(self.test_value)
+ cached_value = cacheable_function(self.test_value)
+ self.assertFalse(cached_value.cached)
+
+ def test_should_cache_fn_global_cache_enabled_section_cache_enabled(self):
+ # Verify should_cache_fn generates a sane function for subsystem and
+ # functions as expected with caching globally enabled and the specific
+ # section caching enabled.
+ cacheable_function = self._get_cacheable_function()
+
+ self._add_test_caching_option()
+ self.config_fixture.config(group='cache', enabled=True)
+ self.config_fixture.config(group='cache', caching=True)
+
+ cacheable_function(self.test_value)
+ cached_value = cacheable_function(self.test_value)
+ self.assertTrue(cached_value.cached)
+
+ def test_cache_dictionary_config_builder(self):
+ """Validate we build a sane dogpile.cache dictionary config."""
+ self.config_fixture.config(group='cache',
+ config_prefix='test_prefix',
+ backend='some_test_backend',
+ expiration_time=86400,
+ backend_argument=['arg1:test',
+ 'arg2:test:test',
+ 'arg3.invalid'])
+
+ config_dict = cache.build_cache_config()
+ self.assertEqual(
+ CONF.cache.backend, config_dict['test_prefix.backend'])
+ self.assertEqual(
+ CONF.cache.expiration_time,
+ config_dict['test_prefix.expiration_time'])
+ self.assertEqual('test', config_dict['test_prefix.arguments.arg1'])
+ self.assertEqual('test:test',
+ config_dict['test_prefix.arguments.arg2'])
+ self.assertNotIn('test_prefix.arguments.arg3', config_dict)
+
+ def test_cache_debug_proxy(self):
+ single_value = 'Test Value'
+ single_key = 'testkey'
+ multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
+
+ self.region.set(single_key, single_value)
+ self.assertEqual(single_value, self.region.get(single_key))
+
+ self.region.delete(single_key)
+ self.assertEqual(NO_VALUE, self.region.get(single_key))
+
+ self.region.set_multi(multi_values)
+ cached_values = self.region.get_multi(multi_values.keys())
+ for value in multi_values.values():
+ self.assertIn(value, cached_values)
+ self.assertEqual(len(multi_values.values()), len(cached_values))
+
+ self.region.delete_multi(multi_values.keys())
+ for value in self.region.get_multi(multi_values.keys()):
+ self.assertEqual(NO_VALUE, value)
+
+ def test_configure_non_region_object_raises_error(self):
+ self.assertRaises(exception.ValidationError,
+ cache.configure_cache_region,
+ "bogus")
+
+
+class CacheNoopBackendTest(tests.TestCase):
+
+ def setUp(self):
+ super(CacheNoopBackendTest, self).setUp()
+ self.region = cache.make_region()
+ cache.configure_cache_region(self.region)
+
+ def config_overrides(self):
+ super(CacheNoopBackendTest, self).config_overrides()
+ self.config_fixture.config(group='cache',
+ backend='keystone.common.cache.noop')
+
+ def test_noop_backend(self):
+ single_value = 'Test Value'
+ single_key = 'testkey'
+ multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
+
+ self.region.set(single_key, single_value)
+ self.assertEqual(NO_VALUE, self.region.get(single_key))
+
+ self.region.set_multi(multi_values)
+ cached_values = self.region.get_multi(multi_values.keys())
+ self.assertEqual(len(cached_values), len(multi_values.values()))
+ for value in cached_values:
+ self.assertEqual(NO_VALUE, value)
+
+ # Delete should not raise exceptions
+ self.region.delete(single_key)
+ self.region.delete_multi(multi_values.keys())
diff --git a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py b/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
new file mode 100644
index 00000000..a56bf754
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
@@ -0,0 +1,727 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import collections
+import copy
+import functools
+import uuid
+
+from dogpile.cache import api
+from dogpile.cache import region as dp_region
+import six
+
+from keystone.common.cache.backends import mongo
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+# Mock database structure sample where 'ks_cache' is the database and
+# 'cache' is the collection. Dogpile CachedValue data is divided into two
+# fields: `value` (CachedValue.payload) and `meta` (CachedValue.metadata).
+ks_cache = {
+ "cache": [
+ {
+ "value": {
+ "serviceType": "identity",
+ "allVersionsUrl": "https://dummyUrl",
+                "dateLastModified": "ISODate(2014-02-08T18:39:13.237Z)",
+ "serviceName": "Identity",
+ "enabled": "True"
+ },
+ "meta": {
+ "v": 1,
+ "ct": 1392371422.015121
+ },
+ "doc_date": "ISODate('2014-02-14T09:50:22.015Z')",
+ "_id": "8251dc95f63842719c077072f1047ddf"
+ },
+ {
+ "value": "dummyValueX",
+ "meta": {
+ "v": 1,
+ "ct": 1392371422.014058
+ },
+ "doc_date": "ISODate('2014-02-14T09:50:22.014Z')",
+ "_id": "66730b9534d146f0804d23729ad35436"
+ }
+ ]
+}
+
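+# NOTE: the sample above only documents the stored-document layout; the mock
+# collection below starts empty and builds its own documents.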
+
+COLLECTIONS = {}
+SON_MANIPULATOR = None
+
+
+class MockCursor(object):
+
+ def __init__(self, collection, dataset_factory):
+ super(MockCursor, self).__init__()
+ self.collection = collection
+ self._factory = dataset_factory
+ self._dataset = self._factory()
+ self._limit = None
+ self._skip = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._skip:
+ for _ in range(self._skip):
+ next(self._dataset)
+ self._skip = None
+ if self._limit is not None and self._limit <= 0:
+ raise StopIteration()
+ if self._limit is not None:
+ self._limit -= 1
+ return next(self._dataset)
+
+ next = __next__
+
+    def __getitem__(self, index):
+        # Materialize the remaining dataset so it can be indexed, then
+        # restore it as an iterator for further iteration.
+        arr = list(self._dataset)
+        self._dataset = iter(arr)
+        return arr[index]
+
+
+class MockCollection(object):
+
+ def __init__(self, db, name):
+ super(MockCollection, self).__init__()
+ self.name = name
+ self._collection_database = db
+ self._documents = {}
+ self.write_concern = {}
+
+ def __getattr__(self, name):
+ if name == 'database':
+ return self._collection_database
+
+ def ensure_index(self, key_or_list, *args, **kwargs):
+ pass
+
+ def index_information(self):
+ return {}
+
+ def find_one(self, spec_or_id=None, *args, **kwargs):
+ if spec_or_id is None:
+ spec_or_id = {}
+ if not isinstance(spec_or_id, collections.Mapping):
+ spec_or_id = {'_id': spec_or_id}
+
+ try:
+ return next(self.find(spec_or_id, *args, **kwargs))
+ except StopIteration:
+ return None
+
+ def find(self, spec=None, *args, **kwargs):
+ return MockCursor(self, functools.partial(self._get_dataset, spec))
+
+ def _get_dataset(self, spec):
+ dataset = (self._copy_doc(document, dict) for document in
+ self._iter_documents(spec))
+ return dataset
+
+ def _iter_documents(self, spec=None):
+ return (SON_MANIPULATOR.transform_outgoing(document, self) for
+ document in six.itervalues(self._documents)
+ if self._apply_filter(document, spec))
+
+    def _apply_filter(self, document, query):
+        for key, search in six.iteritems(query):
+            doc_val = document.get(key)
+            if isinstance(search, dict):
+                op_dict = {'$in': lambda dv, sv: dv in sv}
+                is_match = all(
+                    op_str in op_dict and op_dict[op_str](doc_val, search_val)
+                    for op_str, search_val in six.iteritems(search)
+                )
+            else:
+                is_match = doc_val == search
+            # Every key in the query must match for the document to match.
+            if not is_match:
+                return False
+        return True
+
+ def _copy_doc(self, obj, container):
+ if isinstance(obj, list):
+ new = []
+ for item in obj:
+ new.append(self._copy_doc(item, container))
+ return new
+ if isinstance(obj, dict):
+ new = container()
+ for key, value in obj.items():
+ new[key] = self._copy_doc(value, container)
+ return new
+ else:
+ return copy.copy(obj)
+
+ def insert(self, data, manipulate=True, **kwargs):
+ if isinstance(data, list):
+ return [self._insert(element) for element in data]
+ return self._insert(data)
+
+ def save(self, data, manipulate=True, **kwargs):
+ return self._insert(data)
+
+ def _insert(self, data):
+ if '_id' not in data:
+ data['_id'] = uuid.uuid4().hex
+ object_id = data['_id']
+ self._documents[object_id] = self._internalize_dict(data)
+ return object_id
+
+ def find_and_modify(self, spec, document, upsert=False, **kwargs):
+ self.update(spec, document, upsert, **kwargs)
+
+ def update(self, spec, document, upsert=False, **kwargs):
+
+ existing_docs = [doc for doc in six.itervalues(self._documents)
+ if self._apply_filter(doc, spec)]
+ if existing_docs:
+ existing_doc = existing_docs[0] # should find only 1 match
+ _id = existing_doc['_id']
+ existing_doc.clear()
+ existing_doc['_id'] = _id
+ existing_doc.update(self._internalize_dict(document))
+ elif upsert:
+ existing_doc = self._documents[self._insert(document)]
+
+ def _internalize_dict(self, d):
+ return {k: copy.deepcopy(v) for k, v in six.iteritems(d)}
+
+ def remove(self, spec_or_id=None, search_filter=None):
+ """Remove objects matching spec_or_id from the collection."""
+ if spec_or_id is None:
+ spec_or_id = search_filter if search_filter else {}
+ if not isinstance(spec_or_id, dict):
+ spec_or_id = {'_id': spec_or_id}
+ to_delete = list(self.find(spec=spec_or_id))
+ for doc in to_delete:
+ doc_id = doc['_id']
+ del self._documents[doc_id]
+
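+        # Mimic the write-result document pymongo returns for remove().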
+ return {
+ "connectionId": uuid.uuid4().hex,
+ "n": len(to_delete),
+ "ok": 1.0,
+ "err": None,
+ }
+
+
+class MockMongoDB(object):
+ def __init__(self, dbname):
+ self._dbname = dbname
+        self.manipulator = None
+
+ def authenticate(self, username, password):
+ pass
+
+ def add_son_manipulator(self, manipulator):
+ global SON_MANIPULATOR
+ SON_MANIPULATOR = manipulator
+
+ def __getattr__(self, name):
+ if name == 'authenticate':
+ return self.authenticate
+ elif name == 'name':
+ return self._dbname
+ elif name == 'add_son_manipulator':
+ return self.add_son_manipulator
+ else:
+ return get_collection(self._dbname, name)
+
+ def __getitem__(self, name):
+ return get_collection(self._dbname, name)
+
+
+class MockMongoClient(object):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __getattr__(self, dbname):
+ return MockMongoDB(dbname)
+
+
+def get_collection(db_name, collection_name):
+ mongo_collection = MockCollection(MockMongoDB(db_name), collection_name)
+ return mongo_collection
+
+
+def pymongo_override():
+    global pymongo
+    import pymongo
+    if pymongo.MongoClient is not MockMongoClient:
+        pymongo.MongoClient = MockMongoClient
+    if pymongo.MongoReplicaSetClient is not MockMongoClient:
+        pymongo.MongoReplicaSetClient = MockMongoClient
+
+
+class MyTransformer(mongo.BaseTransform):
+    """Added here just to check that manipulator logic is used correctly."""
+
+ def transform_incoming(self, son, collection):
+ return super(MyTransformer, self).transform_incoming(son, collection)
+
+ def transform_outgoing(self, son, collection):
+ return super(MyTransformer, self).transform_outgoing(son, collection)
+
+
+class MongoCache(tests.BaseTestCase):
+ def setUp(self):
+ super(MongoCache, self).setUp()
+ global COLLECTIONS
+ COLLECTIONS = {}
+ mongo.MongoApi._DB = {}
+ mongo.MongoApi._MONGO_COLLS = {}
+ pymongo_override()
+ # using typical configuration
+ self.arguments = {
+ 'db_hosts': 'localhost:27017',
+ 'db_name': 'ks_cache',
+ 'cache_collection': 'cache',
+ 'username': 'test_user',
+ 'password': 'test_password'
+ }
+
+ def test_missing_db_hosts(self):
+ self.arguments.pop('db_hosts')
+ region = dp_region.make_region()
+ self.assertRaises(exception.ValidationError, region.configure,
+ 'keystone.cache.mongo',
+ arguments=self.arguments)
+
+ def test_missing_db_name(self):
+ self.arguments.pop('db_name')
+ region = dp_region.make_region()
+ self.assertRaises(exception.ValidationError, region.configure,
+ 'keystone.cache.mongo',
+ arguments=self.arguments)
+
+ def test_missing_cache_collection_name(self):
+ self.arguments.pop('cache_collection')
+ region = dp_region.make_region()
+ self.assertRaises(exception.ValidationError, region.configure,
+ 'keystone.cache.mongo',
+ arguments=self.arguments)
+
+ def test_incorrect_write_concern(self):
+ self.arguments['w'] = 'one value'
+ region = dp_region.make_region()
+ self.assertRaises(exception.ValidationError, region.configure,
+ 'keystone.cache.mongo',
+ arguments=self.arguments)
+
+ def test_correct_write_concern(self):
+ self.arguments['w'] = 1
+ region = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue10")
+ # There is no proxy so can access MongoCacheBackend directly
+ self.assertEqual(1, region.backend.api.w)
+
+ def test_incorrect_read_preference(self):
+ self.arguments['read_preference'] = 'inValidValue'
+ region = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+        # Because pymongo is loaded lazily, the read_preference value should
+        # still be the raw string and NOT the enum.
+ self.assertEqual('inValidValue', region.backend.api.read_preference)
+
+ random_key = uuid.uuid4().hex
+ self.assertRaises(ValueError, region.set,
+ random_key, "dummyValue10")
+
+ def test_correct_read_preference(self):
+ self.arguments['read_preference'] = 'secondaryPreferred'
+ region = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+        # Because pymongo is loaded lazily, the read_preference value should
+        # still be the raw string and NOT the enum.
+ self.assertEqual('secondaryPreferred',
+ region.backend.api.read_preference)
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue10")
+
+        # Now that pymongo is loaded, the expected read_preference value is
+        # the enum. There is no proxy, so MongoCacheBackend can be accessed
+        # directly.
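+        # (In pymongo 2.x, ReadPreference.SECONDARY_PREFERRED == 3.)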
+ self.assertEqual(3, region.backend.api.read_preference)
+
+ def test_missing_replica_set_name(self):
+ self.arguments['use_replica'] = True
+ region = dp_region.make_region()
+ self.assertRaises(exception.ValidationError, region.configure,
+ 'keystone.cache.mongo',
+ arguments=self.arguments)
+
+ def test_provided_replica_set_name(self):
+ self.arguments['use_replica'] = True
+ self.arguments['replicaset_name'] = 'my_replica'
+ dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+ self.assertTrue(True) # reached here means no initialization error
+
+ def test_incorrect_mongo_ttl_seconds(self):
+ self.arguments['mongo_ttl_seconds'] = 'sixty'
+ region = dp_region.make_region()
+ self.assertRaises(exception.ValidationError, region.configure,
+ 'keystone.cache.mongo',
+ arguments=self.arguments)
+
+ def test_cache_configuration_values_assertion(self):
+ self.arguments['use_replica'] = True
+ self.arguments['replicaset_name'] = 'my_replica'
+ self.arguments['mongo_ttl_seconds'] = 60
+ self.arguments['ssl'] = False
+ region = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+ # There is no proxy so can access MongoCacheBackend directly
+ self.assertEqual('localhost:27017', region.backend.api.hosts)
+ self.assertEqual('ks_cache', region.backend.api.db_name)
+ self.assertEqual('cache', region.backend.api.cache_collection)
+ self.assertEqual('test_user', region.backend.api.username)
+ self.assertEqual('test_password', region.backend.api.password)
+ self.assertEqual(True, region.backend.api.use_replica)
+ self.assertEqual('my_replica', region.backend.api.replicaset_name)
+ self.assertEqual(False, region.backend.api.conn_kwargs['ssl'])
+ self.assertEqual(60, region.backend.api.ttl_seconds)
+
+ def test_multiple_region_cache_configuration(self):
+ arguments1 = copy.copy(self.arguments)
+ arguments1['cache_collection'] = 'cache_region1'
+
+ region1 = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=arguments1)
+ # There is no proxy so can access MongoCacheBackend directly
+ self.assertEqual('localhost:27017', region1.backend.api.hosts)
+ self.assertEqual('ks_cache', region1.backend.api.db_name)
+ self.assertEqual('cache_region1', region1.backend.api.cache_collection)
+ self.assertEqual('test_user', region1.backend.api.username)
+ self.assertEqual('test_password', region1.backend.api.password)
+ # Should be None because of delayed initialization
+ self.assertIsNone(region1.backend.api._data_manipulator)
+
+ random_key1 = uuid.uuid4().hex
+ region1.set(random_key1, "dummyValue10")
+ self.assertEqual("dummyValue10", region1.get(random_key1))
+ # Now should have initialized
+ self.assertIsInstance(region1.backend.api._data_manipulator,
+ mongo.BaseTransform)
+
+ class_name = '%s.%s' % (MyTransformer.__module__, "MyTransformer")
+
+ arguments2 = copy.copy(self.arguments)
+ arguments2['cache_collection'] = 'cache_region2'
+ arguments2['son_manipulator'] = class_name
+
+ region2 = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=arguments2)
+ # There is no proxy so can access MongoCacheBackend directly
+ self.assertEqual('localhost:27017', region2.backend.api.hosts)
+ self.assertEqual('ks_cache', region2.backend.api.db_name)
+ self.assertEqual('cache_region2', region2.backend.api.cache_collection)
+
+ # Should be None because of delayed initialization
+ self.assertIsNone(region2.backend.api._data_manipulator)
+
+ random_key = uuid.uuid4().hex
+ region2.set(random_key, "dummyValue20")
+ self.assertEqual("dummyValue20", region2.get(random_key))
+ # Now should have initialized
+ self.assertIsInstance(region2.backend.api._data_manipulator,
+ MyTransformer)
+
+ region1.set(random_key1, "dummyValue22")
+ self.assertEqual("dummyValue22", region1.get(random_key1))
+
+ def test_typical_configuration(self):
+
+ dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+ self.assertTrue(True) # reached here means no initialization error
+
+ def test_backend_get_missing_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ random_key = uuid.uuid4().hex
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+
+ def test_backend_set_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue")
+ self.assertEqual("dummyValue", region.get(random_key))
+
+ def test_backend_set_data_with_string_as_valid_ttl(self):
+
+ self.arguments['mongo_ttl_seconds'] = '3600'
+ region = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+ self.assertEqual(3600, region.backend.api.ttl_seconds)
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue")
+ self.assertEqual("dummyValue", region.get(random_key))
+
+ def test_backend_set_data_with_int_as_valid_ttl(self):
+
+ self.arguments['mongo_ttl_seconds'] = 1800
+ region = dp_region.make_region().configure('keystone.cache.mongo',
+ arguments=self.arguments)
+ self.assertEqual(1800, region.backend.api.ttl_seconds)
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue")
+ self.assertEqual("dummyValue", region.get(random_key))
+
+ def test_backend_set_none_as_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, None)
+ self.assertIsNone(region.get(random_key))
+
+ def test_backend_set_blank_as_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "")
+ self.assertEqual("", region.get(random_key))
+
+ def test_backend_set_same_key_multiple_times(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue")
+ self.assertEqual("dummyValue", region.get(random_key))
+
+ dict_value = {'key1': 'value1'}
+ region.set(random_key, dict_value)
+ self.assertEqual(dict_value, region.get(random_key))
+
+ region.set(random_key, "dummyValue2")
+ self.assertEqual("dummyValue2", region.get(random_key))
+
+ def test_backend_multi_set_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+ random_key = uuid.uuid4().hex
+ random_key1 = uuid.uuid4().hex
+ random_key2 = uuid.uuid4().hex
+ random_key3 = uuid.uuid4().hex
+ mapping = {random_key1: 'dummyValue1',
+ random_key2: 'dummyValue2',
+ random_key3: 'dummyValue3'}
+ region.set_multi(mapping)
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+ self.assertFalse(region.get(random_key))
+ self.assertEqual("dummyValue1", region.get(random_key1))
+ self.assertEqual("dummyValue2", region.get(random_key2))
+ self.assertEqual("dummyValue3", region.get(random_key3))
+
+ def test_backend_multi_get_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+ random_key = uuid.uuid4().hex
+ random_key1 = uuid.uuid4().hex
+ random_key2 = uuid.uuid4().hex
+ random_key3 = uuid.uuid4().hex
+ mapping = {random_key1: 'dummyValue1',
+ random_key2: '',
+ random_key3: 'dummyValue3'}
+ region.set_multi(mapping)
+
+ keys = [random_key, random_key1, random_key2, random_key3]
+ results = region.get_multi(keys)
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, results[0])
+ self.assertEqual("dummyValue1", results[1])
+ self.assertEqual("", results[2])
+ self.assertEqual("dummyValue3", results[3])
+
+ def test_backend_multi_set_should_update_existing(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+ random_key = uuid.uuid4().hex
+ random_key1 = uuid.uuid4().hex
+ random_key2 = uuid.uuid4().hex
+ random_key3 = uuid.uuid4().hex
+ mapping = {random_key1: 'dummyValue1',
+ random_key2: 'dummyValue2',
+ random_key3: 'dummyValue3'}
+ region.set_multi(mapping)
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+ self.assertEqual("dummyValue1", region.get(random_key1))
+ self.assertEqual("dummyValue2", region.get(random_key2))
+ self.assertEqual("dummyValue3", region.get(random_key3))
+
+ mapping = {random_key1: 'dummyValue4',
+ random_key2: 'dummyValue5'}
+ region.set_multi(mapping)
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+ self.assertEqual("dummyValue4", region.get(random_key1))
+ self.assertEqual("dummyValue5", region.get(random_key2))
+ self.assertEqual("dummyValue3", region.get(random_key3))
+
+ def test_backend_multi_set_get_with_blanks_none(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+ random_key = uuid.uuid4().hex
+ random_key1 = uuid.uuid4().hex
+ random_key2 = uuid.uuid4().hex
+ random_key3 = uuid.uuid4().hex
+ random_key4 = uuid.uuid4().hex
+ mapping = {random_key1: 'dummyValue1',
+ random_key2: None,
+ random_key3: '',
+ random_key4: 'dummyValue4'}
+ region.set_multi(mapping)
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+ self.assertEqual("dummyValue1", region.get(random_key1))
+ self.assertIsNone(region.get(random_key2))
+ self.assertEqual("", region.get(random_key3))
+ self.assertEqual("dummyValue4", region.get(random_key4))
+
+ keys = [random_key, random_key1, random_key2, random_key3, random_key4]
+ results = region.get_multi(keys)
+
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, results[0])
+ self.assertEqual("dummyValue1", results[1])
+ self.assertIsNone(results[2])
+ self.assertEqual("", results[3])
+ self.assertEqual("dummyValue4", results[4])
+
+ mapping = {random_key1: 'dummyValue5',
+ random_key2: 'dummyValue6'}
+ region.set_multi(mapping)
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+ self.assertEqual("dummyValue5", region.get(random_key1))
+ self.assertEqual("dummyValue6", region.get(random_key2))
+ self.assertEqual("", region.get(random_key3))
+
+ def test_backend_delete_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue")
+ self.assertEqual("dummyValue", region.get(random_key))
+
+ region.delete(random_key)
+ # should return NO_VALUE as key no longer exists in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+
+ def test_backend_multi_delete_data(self):
+
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+ random_key = uuid.uuid4().hex
+ random_key1 = uuid.uuid4().hex
+ random_key2 = uuid.uuid4().hex
+ random_key3 = uuid.uuid4().hex
+ mapping = {random_key1: 'dummyValue1',
+ random_key2: 'dummyValue2',
+ random_key3: 'dummyValue3'}
+ region.set_multi(mapping)
+ # should return NO_VALUE as key does not exist in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key))
+ self.assertEqual("dummyValue1", region.get(random_key1))
+ self.assertEqual("dummyValue2", region.get(random_key2))
+ self.assertEqual("dummyValue3", region.get(random_key3))
+ self.assertEqual(api.NO_VALUE, region.get("InvalidKey"))
+
+ keys = mapping.keys()
+
+ region.delete_multi(keys)
+
+ self.assertEqual(api.NO_VALUE, region.get("InvalidKey"))
+ # should return NO_VALUE as keys no longer exist in cache
+ self.assertEqual(api.NO_VALUE, region.get(random_key1))
+ self.assertEqual(api.NO_VALUE, region.get(random_key2))
+ self.assertEqual(api.NO_VALUE, region.get(random_key3))
+
+ def test_additional_crud_method_arguments_support(self):
+        """Additional arguments should work across find/insert/update."""
+
+ self.arguments['wtimeout'] = 30000
+ self.arguments['j'] = True
+ self.arguments['continue_on_error'] = True
+ self.arguments['secondary_acceptable_latency_ms'] = 60
+ region = dp_region.make_region().configure(
+ 'keystone.cache.mongo',
+ arguments=self.arguments
+ )
+
+ # There is no proxy so can access MongoCacheBackend directly
+ api_methargs = region.backend.api.meth_kwargs
+ self.assertEqual(30000, api_methargs['wtimeout'])
+ self.assertEqual(True, api_methargs['j'])
+ self.assertEqual(True, api_methargs['continue_on_error'])
+ self.assertEqual(60, api_methargs['secondary_acceptable_latency_ms'])
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue1")
+ self.assertEqual("dummyValue1", region.get(random_key))
+
+ region.set(random_key, "dummyValue2")
+ self.assertEqual("dummyValue2", region.get(random_key))
+
+ random_key = uuid.uuid4().hex
+ region.set(random_key, "dummyValue3")
+ self.assertEqual("dummyValue3", region.get(random_key))
diff --git a/keystone-moon/keystone/tests/unit/test_catalog.py b/keystone-moon/keystone/tests/unit/test_catalog.py
new file mode 100644
index 00000000..9dda5d83
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_catalog.py
@@ -0,0 +1,219 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import six
+
+from keystone import catalog
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit import rest
+
+
+BASE_URL = 'http://127.0.0.1:35357/v2'
+SERVICE_FIXTURE = object()
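+# Sentinel that lets _endpoint_create() distinguish "use the default service"
+# from an explicitly passed service_id of None.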
+
+
+class V2CatalogTestCase(rest.RestfulTestCase):
+ def setUp(self):
+ super(V2CatalogTestCase, self).setUp()
+ self.useFixture(database.Database())
+
+ self.service_id = uuid.uuid4().hex
+ self.service = self.new_service_ref()
+ self.service['id'] = self.service_id
+ self.catalog_api.create_service(
+ self.service_id,
+ self.service.copy())
+
+        # TODO(termie): add an admin user to the fixtures and use that user;
+        # override the fixtures for now.
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ def config_overrides(self):
+ super(V2CatalogTestCase, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.sql.Catalog')
+
+ def new_ref(self):
+ """Populates a ref with attributes common to all API entities."""
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+ def new_service_ref(self):
+ ref = self.new_ref()
+ ref['type'] = uuid.uuid4().hex
+ return ref
+
+ def _get_token_id(self, r):
+ """Applicable only to JSON."""
+ return r.result['access']['token']['id']
+
+ def _endpoint_create(self, expected_status=200, service_id=SERVICE_FIXTURE,
+ publicurl='http://localhost:8080',
+ internalurl='http://localhost:8080',
+ adminurl='http://localhost:8080'):
+ if service_id is SERVICE_FIXTURE:
+ service_id = self.service_id
+ # FIXME(dolph): expected status should actually be 201 Created
+ path = '/v2.0/endpoints'
+ body = {
+ 'endpoint': {
+ 'adminurl': adminurl,
+ 'service_id': service_id,
+ 'region': 'RegionOne',
+ 'internalurl': internalurl,
+ 'publicurl': publicurl
+ }
+ }
+
+ r = self.admin_request(method='POST', token=self.get_scoped_token(),
+ path=path, expected_status=expected_status,
+ body=body)
+ return body, r
+
+ def test_endpoint_create(self):
+ req_body, response = self._endpoint_create()
+ self.assertIn('endpoint', response.result)
+ self.assertIn('id', response.result['endpoint'])
+ for field, value in six.iteritems(req_body['endpoint']):
+ self.assertEqual(response.result['endpoint'][field], value)
+
+ def test_endpoint_create_with_null_adminurl(self):
+ req_body, response = self._endpoint_create(adminurl=None)
+ self.assertIsNone(req_body['endpoint']['adminurl'])
+ self.assertNotIn('adminurl', response.result['endpoint'])
+
+ def test_endpoint_create_with_empty_adminurl(self):
+ req_body, response = self._endpoint_create(adminurl='')
+ self.assertEqual('', req_body['endpoint']['adminurl'])
+ self.assertNotIn("adminurl", response.result['endpoint'])
+
+ def test_endpoint_create_with_null_internalurl(self):
+ req_body, response = self._endpoint_create(internalurl=None)
+ self.assertIsNone(req_body['endpoint']['internalurl'])
+ self.assertNotIn('internalurl', response.result['endpoint'])
+
+ def test_endpoint_create_with_empty_internalurl(self):
+ req_body, response = self._endpoint_create(internalurl='')
+ self.assertEqual('', req_body['endpoint']['internalurl'])
+ self.assertNotIn("internalurl", response.result['endpoint'])
+
+ def test_endpoint_create_with_null_publicurl(self):
+ self._endpoint_create(expected_status=400, publicurl=None)
+
+ def test_endpoint_create_with_empty_publicurl(self):
+ self._endpoint_create(expected_status=400, publicurl='')
+
+ def test_endpoint_create_with_null_service_id(self):
+ self._endpoint_create(expected_status=400, service_id=None)
+
+ def test_endpoint_create_with_empty_service_id(self):
+ self._endpoint_create(expected_status=400, service_id='')
+
+
+class TestV2CatalogAPISQL(tests.TestCase):
+
+ def setUp(self):
+ super(TestV2CatalogAPISQL, self).setUp()
+ self.useFixture(database.Database())
+ self.catalog_api = catalog.Manager()
+
+ self.service_id = uuid.uuid4().hex
+ service = {'id': self.service_id, 'name': uuid.uuid4().hex}
+ self.catalog_api.create_service(self.service_id, service)
+
+ endpoint = self.new_endpoint_ref(service_id=self.service_id)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ def config_overrides(self):
+ super(TestV2CatalogAPISQL, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.sql.Catalog')
+
+ def new_endpoint_ref(self, service_id):
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'interface': uuid.uuid4().hex[:8],
+ 'service_id': service_id,
+ 'url': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ }
+
+ def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
+ user_id = uuid.uuid4().hex
+ tenant_id = uuid.uuid4().hex
+
+ # the only endpoint in the catalog is the one created in setUp
+ catalog = self.catalog_api.get_catalog(user_id, tenant_id)
+ self.assertEqual(1, len(catalog))
+ # it's also the only endpoint in the backend
+ self.assertEqual(1, len(self.catalog_api.list_endpoints()))
+
+ # create a new, invalid endpoint - malformed type declaration
+ endpoint = self.new_endpoint_ref(self.service_id)
+ endpoint['url'] = 'http://keystone/%(tenant_id)'
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # create a new, invalid endpoint - nonexistent key
+ endpoint = self.new_endpoint_ref(self.service_id)
+ endpoint['url'] = 'http://keystone/%(you_wont_find_me)s'
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # verify that the invalid endpoints don't appear in the catalog
+ catalog = self.catalog_api.get_catalog(user_id, tenant_id)
+ self.assertEqual(1, len(catalog))
+ # all three endpoints appear in the backend
+ self.assertEqual(3, len(self.catalog_api.list_endpoints()))
+
+ def test_get_catalog_always_returns_service_name(self):
+ user_id = uuid.uuid4().hex
+ tenant_id = uuid.uuid4().hex
+
+ # create a service, with a name
+ named_svc = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(named_svc['id'], named_svc)
+ endpoint = self.new_endpoint_ref(service_id=named_svc['id'])
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # create a service, with no name
+ unnamed_svc = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex
+ }
+ self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
+ endpoint = self.new_endpoint_ref(service_id=unnamed_svc['id'])
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ region = None
+ catalog = self.catalog_api.get_catalog(user_id, tenant_id)
+
+ self.assertEqual(named_svc['name'],
+ catalog[region][named_svc['type']]['name'])
+ self.assertEqual('', catalog[region][unnamed_svc['type']]['name'])
diff --git a/keystone-moon/keystone/tests/unit/test_cert_setup.py b/keystone-moon/keystone/tests/unit/test_cert_setup.py
new file mode 100644
index 00000000..d1e9ccfd
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_cert_setup.py
@@ -0,0 +1,246 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+
+import mock
+from testtools import matchers
+
+from keystone.common import environment
+from keystone.common import openssl
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import rest
+from keystone import token
+
+
+SSLDIR = tests.dirs.tmp('ssl')
+CONF = tests.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+CERTDIR = os.path.join(SSLDIR, 'certs')
+KEYDIR = os.path.join(SSLDIR, 'private')
+
+
+class CertSetupTestCase(rest.RestfulTestCase):
+
+ def setUp(self):
+ super(CertSetupTestCase, self).setUp()
+
+ def cleanup_ssldir():
+ try:
+ shutil.rmtree(SSLDIR)
+ except OSError:
+ pass
+
+ self.addCleanup(cleanup_ssldir)
+
+ def config_overrides(self):
+ super(CertSetupTestCase, self).config_overrides()
+ ca_certs = os.path.join(CERTDIR, 'ca.pem')
+ ca_key = os.path.join(CERTDIR, 'cakey.pem')
+
+ self.config_fixture.config(
+ group='signing',
+ certfile=os.path.join(CERTDIR, 'signing_cert.pem'),
+ ca_certs=ca_certs,
+ ca_key=ca_key,
+ keyfile=os.path.join(KEYDIR, 'signing_key.pem'))
+ self.config_fixture.config(
+ group='ssl',
+ ca_key=ca_key)
+ self.config_fixture.config(
+ group='eventlet_server_ssl',
+ ca_certs=ca_certs,
+ certfile=os.path.join(CERTDIR, 'keystone.pem'),
+ keyfile=os.path.join(KEYDIR, 'keystonekey.pem'))
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pkiz.Provider')
+
+ def test_can_handle_missing_certs(self):
+ controller = token.controllers.Auth()
+
+ self.config_fixture.config(group='signing', certfile='invalid')
+ password = 'fake1'
+ user = {
+ 'name': 'fake1',
+ 'password': password,
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+ user = self.identity_api.create_user(user)
+ body_dict = {
+ 'passwordCredentials': {
+ 'userId': user['id'],
+ 'password': password,
+ },
+ }
+ self.assertRaises(exception.UnexpectedError,
+ controller.authenticate,
+ {}, body_dict)
+
+ def test_create_pki_certs(self, rebuild=False):
+ pki = openssl.ConfigurePKI(None, None, rebuild=rebuild)
+ pki.run()
+ self.assertTrue(os.path.exists(CONF.signing.certfile))
+ self.assertTrue(os.path.exists(CONF.signing.ca_certs))
+ self.assertTrue(os.path.exists(CONF.signing.keyfile))
+
+ def test_create_ssl_certs(self, rebuild=False):
+ ssl = openssl.ConfigureSSL(None, None, rebuild=rebuild)
+ ssl.run()
+ self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.ca_certs))
+ self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.certfile))
+ self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.keyfile))
+
+ def test_fetch_signing_cert(self, rebuild=False):
+ pki = openssl.ConfigurePKI(None, None, rebuild=rebuild)
+ pki.run()
+
+ # NOTE(jamielennox): Use request directly because certificate
+ # requests don't have some of the normal information
+ signing_resp = self.request(self.public_app,
+ '/v2.0/certificates/signing',
+ method='GET', expected_status=200)
+
+ cacert_resp = self.request(self.public_app,
+ '/v2.0/certificates/ca',
+ method='GET', expected_status=200)
+
+ with open(CONF.signing.certfile) as f:
+ self.assertEqual(f.read(), signing_resp.text)
+
+ with open(CONF.signing.ca_certs) as f:
+ self.assertEqual(f.read(), cacert_resp.text)
+
+        # NOTE(jamielennox): This is weird behaviour that we need to
+        # enforce. It doesn't matter what you ask for, it's always going to
+        # give text with a text/html content_type.
+
+ for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']:
+ for accept in [None, 'text/html', 'application/json', 'text/xml']:
+ headers = {'Accept': accept} if accept else {}
+ resp = self.request(self.public_app, path, method='GET',
+ expected_status=200,
+ headers=headers)
+
+ self.assertEqual('text/html', resp.content_type)
+
+ def test_fetch_signing_cert_when_rebuild(self):
+ pki = openssl.ConfigurePKI(None, None)
+ pki.run()
+ self.test_fetch_signing_cert(rebuild=True)
+
+ def test_failure(self):
+ for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']:
+ self.request(self.public_app, path, method='GET',
+ expected_status=500)
+
+ def test_pki_certs_rebuild(self):
+ self.test_create_pki_certs()
+ with open(CONF.signing.certfile) as f:
+ cert_file1 = f.read()
+
+ self.test_create_pki_certs(rebuild=True)
+ with open(CONF.signing.certfile) as f:
+ cert_file2 = f.read()
+
+ self.assertNotEqual(cert_file1, cert_file2)
+
+ def test_ssl_certs_rebuild(self):
+ self.test_create_ssl_certs()
+ with open(CONF.eventlet_server_ssl.certfile) as f:
+ cert_file1 = f.read()
+
+ self.test_create_ssl_certs(rebuild=True)
+ with open(CONF.eventlet_server_ssl.certfile) as f:
+ cert_file2 = f.read()
+
+ self.assertNotEqual(cert_file1, cert_file2)
+
+ @mock.patch.object(os, 'remove')
+ def test_rebuild_pki_certs_remove_error(self, mock_remove):
+ self.test_create_pki_certs()
+ with open(CONF.signing.certfile) as f:
+ cert_file1 = f.read()
+
+ mock_remove.side_effect = OSError()
+ self.test_create_pki_certs(rebuild=True)
+ with open(CONF.signing.certfile) as f:
+ cert_file2 = f.read()
+
+ self.assertEqual(cert_file1, cert_file2)
+
+ @mock.patch.object(os, 'remove')
+ def test_rebuild_ssl_certs_remove_error(self, mock_remove):
+ self.test_create_ssl_certs()
+ with open(CONF.eventlet_server_ssl.certfile) as f:
+ cert_file1 = f.read()
+
+ mock_remove.side_effect = OSError()
+ self.test_create_ssl_certs(rebuild=True)
+ with open(CONF.eventlet_server_ssl.certfile) as f:
+ cert_file2 = f.read()
+
+ self.assertEqual(cert_file1, cert_file2)
+
+ def test_create_pki_certs_twice_without_rebuild(self):
+ self.test_create_pki_certs()
+ with open(CONF.signing.certfile) as f:
+ cert_file1 = f.read()
+
+ self.test_create_pki_certs()
+ with open(CONF.signing.certfile) as f:
+ cert_file2 = f.read()
+
+ self.assertEqual(cert_file1, cert_file2)
+
+ def test_create_ssl_certs_twice_without_rebuild(self):
+ self.test_create_ssl_certs()
+ with open(CONF.eventlet_server_ssl.certfile) as f:
+ cert_file1 = f.read()
+
+ self.test_create_ssl_certs()
+ with open(CONF.eventlet_server_ssl.certfile) as f:
+ cert_file2 = f.read()
+
+ self.assertEqual(cert_file1, cert_file2)
+
+
+class TestExecCommand(tests.TestCase):
+
+ @mock.patch.object(environment.subprocess.Popen, 'poll')
+ def test_running_a_successful_command(self, mock_poll):
+ mock_poll.return_value = 0
+
+ ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group')
+ ssl.exec_command(['ls'])
+
+ @mock.patch.object(environment.subprocess.Popen, 'communicate')
+ @mock.patch.object(environment.subprocess.Popen, 'poll')
+ def test_running_an_invalid_command(self, mock_poll, mock_communicate):
+ output = 'this is the output string'
+
+ mock_communicate.return_value = (output, '')
+ mock_poll.return_value = 1
+
+ cmd = ['ls']
+ ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group')
+ e = self.assertRaises(environment.subprocess.CalledProcessError,
+ ssl.exec_command,
+ cmd)
+ self.assertThat(e.output, matchers.Equals(output))
diff --git a/keystone-moon/keystone/tests/unit/test_cli.py b/keystone-moon/keystone/tests/unit/test_cli.py
new file mode 100644
index 00000000..20aa03e6
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_cli.py
@@ -0,0 +1,252 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import uuid
+
+import mock
+from oslo_config import cfg
+
+from keystone import cli
+from keystone.common import dependency
+from keystone.i18n import _
+from keystone import resource
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import database
+
+CONF = cfg.CONF
+
+
+class CliTestCase(tests.SQLDriverOverrides, tests.TestCase):
+ def config_files(self):
+ config_files = super(CliTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def test_token_flush(self):
+ self.useFixture(database.Database())
+ self.load_backends()
+ cli.TokenFlush.main()
+
+
+class CliDomainConfigAllTestCase(tests.SQLDriverOverrides, tests.TestCase):
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(CliDomainConfigAllTestCase, self).setUp()
+ self.load_backends()
+ self.config_fixture.config(
+ group='identity',
+ domain_config_dir=tests.TESTCONF + '/domain_configs_multi_ldap')
+ self.domain_count = 3
+ self.setup_initial_domains()
+
+ def config_files(self):
+ self.config_fixture.register_cli_opt(cli.command_opt)
+ self.addCleanup(self.cleanup)
+ config_files = super(CliDomainConfigAllTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def cleanup(self):
+ CONF.reset()
+ CONF.unregister_opt(cli.command_opt)
+
+ def cleanup_domains(self):
+ for domain in self.domains:
+ if domain == 'domain_default':
+ # Not allowed to delete the default domain, but should at least
+ # delete any domain-specific config for it.
+ self.domain_config_api.delete_config(
+ CONF.identity.default_domain_id)
+ continue
+ this_domain = self.domains[domain]
+ this_domain['enabled'] = False
+ self.resource_api.update_domain(this_domain['id'], this_domain)
+ self.resource_api.delete_domain(this_domain['id'])
+ self.domains = {}
+
+ def config(self, config_files):
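+ # Simulates running 'keystone-manage domain_config_upload --all' by
+ # feeding the equivalent argv straight to oslo.config.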
+ CONF(args=['domain_config_upload', '--all'], project='keystone',
+ default_config_files=config_files)
+
+ def setup_initial_domains(self):
+
+ def create_domain(domain):
+ return self.resource_api.create_domain(domain['id'], domain)
+
+ self.domains = {}
+ self.addCleanup(self.cleanup_domains)
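+ # Note: range(1, domain_count) creates domain1..domain(N-1); together
+ # with the default domain added below this yields domain_count domains
+ # in total.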
+ for x in range(1, self.domain_count):
+ domain = 'domain%s' % x
+ self.domains[domain] = create_domain(
+ {'id': uuid.uuid4().hex, 'name': domain})
+ self.domains['domain_default'] = create_domain(
+ resource.calc_default_domain())
+
+ def test_config_upload(self):
+ # The values below are the same as in the domain_configs_multi_ldap
+ # directory of the test config files.
+ default_config = {
+ 'ldap': {'url': 'fake://memory',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=example,cn=com'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+ domain1_config = {
+ 'ldap': {'url': 'fake://memory1',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=example,cn=com'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+ domain2_config = {
+ 'ldap': {'url': 'fake://memory',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=myroot,cn=com',
+ 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
+ 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+
+ # Clear backend dependencies, since cli loads these manually
+ dependency.reset()
+ cli.DomainConfigUpload.main()
+
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ CONF.identity.default_domain_id)
+ self.assertEqual(default_config, res)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domains['domain1']['id'])
+ self.assertEqual(domain1_config, res)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domains['domain2']['id'])
+ self.assertEqual(domain2_config, res)
+
+
+class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
+
+ def config(self, config_files):
+ CONF(args=['domain_config_upload', '--domain-name', 'Default'],
+ project='keystone', default_config_files=config_files)
+
+ def test_config_upload(self):
+ # The values below are the same as in the domain_configs_multi_ldap
+ # directory of the test config files.
+ default_config = {
+ 'ldap': {'url': 'fake://memory',
+ 'user': 'cn=Admin',
+ 'password': 'password',
+ 'suffix': 'cn=example,cn=com'},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+
+ # Clear backend dependencies, since cli loads these manually
+ dependency.reset()
+ cli.DomainConfigUpload.main()
+
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ CONF.identity.default_domain_id)
+ self.assertEqual(default_config, res)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domains['domain1']['id'])
+ self.assertEqual({}, res)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domains['domain2']['id'])
+ self.assertEqual({}, res)
+
+ def test_no_overwrite_config(self):
+ # Create a config for the default domain
+ default_config = {
+ 'ldap': {'url': uuid.uuid4().hex},
+ 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ }
+ self.domain_config_api.create_config(
+ CONF.identity.default_domain_id, default_config)
+
+ # Now try to upload the settings in the configuration file for the
+ # default domain.
+ dependency.reset()
+ with mock.patch('__builtin__.print') as mock_print:
+ self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ file_name = ('keystone.%s.conf' %
+ resource.calc_default_domain()['name'])
+ error_msg = _(
+ 'Domain: %(domain)s already has a configuration defined - '
+ 'ignoring file: %(file)s.') % {
+ 'domain': resource.calc_default_domain()['name'],
+ 'file': os.path.join(CONF.identity.domain_config_dir,
+ file_name)}
+ mock_print.assert_has_calls([mock.call(error_msg)])
+
+ res = self.domain_config_api.get_config(
+ CONF.identity.default_domain_id)
+ # The initial config should not have been overwritten
+ self.assertEqual(default_config, res)
+
+
+class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase):
+
+ def config(self, config_files):
+ CONF(args=['domain_config_upload'],
+ project='keystone', default_config_files=config_files)
+
+ def test_config_upload(self):
+ dependency.reset()
+ with mock.patch('__builtin__.print') as mock_print:
+ self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ mock_print.assert_has_calls(
+ [mock.call(
+ _('At least one option must be provided, use either '
+ '--all or --domain-name'))])
+
+
+class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase):
+
+ def config(self, config_files):
+ CONF(args=['domain_config_upload', '--all', '--domain-name',
+ 'Default'],
+ project='keystone', default_config_files=config_files)
+
+ def test_config_upload(self):
+ dependency.reset()
+ with mock.patch('__builtin__.print') as mock_print:
+ self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ mock_print.assert_has_calls(
+ [mock.call(_('The --all option cannot be used with '
+ 'the --domain-name option'))])
+
+
+class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase):
+
+ def config(self, config_files):
+ self.invalid_domain_name = uuid.uuid4().hex
+ CONF(args=['domain_config_upload', '--domain-name',
+ self.invalid_domain_name],
+ project='keystone', default_config_files=config_files)
+
+ def test_config_upload(self):
+ dependency.reset()
+ with mock.patch('__builtin__.print') as mock_print:
+ self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ file_name = 'keystone.%s.conf' % self.invalid_domain_name
+ error_msg = (_(
+ 'Invalid domain name: %(domain)s found in config file name: '
+ '%(file)s - ignoring this file.') % {
+ 'domain': self.invalid_domain_name,
+ 'file': os.path.join(CONF.identity.domain_config_dir,
+ file_name)})
+ mock_print.assert_has_calls([mock.call(error_msg)])
diff --git a/keystone-moon/keystone/tests/unit/test_config.py b/keystone-moon/keystone/tests/unit/test_config.py
new file mode 100644
index 00000000..15cfac81
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_config.py
@@ -0,0 +1,84 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+
+from keystone import config
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+CONF = cfg.CONF
+
+
+class ConfigTestCase(tests.TestCase):
+
+ def config_files(self):
+ config_files = super(ConfigTestCase, self).config_files()
+ # Insert the keystone sample as the first config file to be loaded
+ # since it is used in one of the code paths to determine the paste-ini
+ # location.
+ config_files.insert(0, tests.dirs.etc('keystone.conf.sample'))
+ return config_files
+
+ def test_paste_config(self):
+ self.assertEqual(tests.dirs.etc('keystone-paste.ini'),
+ config.find_paste_config())
+ self.config_fixture.config(group='paste_deploy',
+ config_file=uuid.uuid4().hex)
+ self.assertRaises(exception.ConfigFileNotFound,
+ config.find_paste_config)
+ self.config_fixture.config(group='paste_deploy', config_file='')
+ self.assertEqual(tests.dirs.etc('keystone.conf.sample'),
+ config.find_paste_config())
+
+ def test_config_default(self):
+ self.assertEqual('keystone.auth.plugins.password.Password',
+ CONF.auth.password)
+ self.assertEqual('keystone.auth.plugins.token.Token',
+ CONF.auth.token)
+
+
+class DeprecatedTestCase(tests.TestCase):
+ """Test using the original (deprecated) name for renamed options."""
+
+ def config_files(self):
+ config_files = super(DeprecatedTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('deprecated.conf'))
+ return config_files
+
+ def test_sql(self):
+ # Options in [sql] were moved to [database] in Icehouse for the change
+ # to use oslo-incubator's db.sqlalchemy.sessions.
+
+ self.assertEqual('sqlite://deprecated', CONF.database.connection)
+ self.assertEqual(54321, CONF.database.idle_timeout)
+
+
+class DeprecatedOverrideTestCase(tests.TestCase):
+ """Test using the deprecated AND new name for renamed options."""
+
+ def config_files(self):
+ config_files = super(DeprecatedOverrideTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('deprecated_override.conf'))
+ return config_files
+
+ def test_sql(self):
+ # Options in [sql] were moved to [database] in Icehouse for the change
+ # to use oslo-incubator's db.sqlalchemy.sessions.
+
+ self.assertEqual('sqlite://new', CONF.database.connection)
+ self.assertEqual(65432, CONF.database.idle_timeout)
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py b/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
new file mode 100644
index 00000000..43ea1ac5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
@@ -0,0 +1,55 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.contrib import s3
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+class S3ContribCore(tests.TestCase):
+ def setUp(self):
+ super(S3ContribCore, self).setUp()
+
+ self.load_backends()
+
+ self.controller = s3.S3Controller()
+
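+ # The 'token' below is the base64-encoded S3 string-to-sign (HTTP
+ # verb, content MD5, content type, date and resource); check_signature
+ # is expected to recompute an HMAC-SHA1 over it with the secret and
+ # compare the result against 'signature', returning None on a match.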
+ def test_good_signature(self):
+ creds_ref = {'secret':
+ 'b121dd41cdcc42fe9f70e572e84295aa'}
+ credentials = {'token':
+ 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
+ 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
+ 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
+ 'vbV9zMy50eHQ=',
+ 'signature': 'IL4QLcLVaYgylF9iHj6Wb8BGZsw='}
+
+ self.assertIsNone(self.controller.check_signature(creds_ref,
+ credentials))
+
+ def test_bad_signature(self):
+ creds_ref = {'secret':
+ 'b121dd41cdcc42fe9f70e572e84295aa'}
+ credentials = {'token':
+ 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
+ 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
+ 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
+ 'vbV9zMy50eHQ=',
+ 'signature': uuid.uuid4().hex}
+
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, credentials)
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py b/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
new file mode 100644
index 00000000..8664e2c3
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
@@ -0,0 +1,57 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.tests.unit import test_v3
+
+
+class BaseTestCase(test_v3.RestfulTestCase):
+
+ EXTENSION_TO_ADD = 'simple_cert_extension'
+
+ CA_PATH = '/v3/OS-SIMPLE-CERT/ca'
+ CERT_PATH = '/v3/OS-SIMPLE-CERT/certificates'
+
+
+class TestSimpleCert(BaseTestCase):
+
+ def request_cert(self, path):
+ content_type = 'application/x-pem-file'
+ response = self.request(app=self.public_app,
+ method='GET',
+ path=path,
+ headers={'Accept': content_type},
+ expected_status=200)
+
+ self.assertEqual(content_type, response.content_type.lower())
+ self.assertIn('---BEGIN', response.body)
+
+ return response
+
+ def test_ca_cert(self):
+ self.request_cert(self.CA_PATH)
+
+ def test_signing_cert(self):
+ self.request_cert(self.CERT_PATH)
+
+ def test_missing_file(self):
+ # these files do not exist
+ self.config_fixture.config(group='signing',
+ ca_certs=uuid.uuid4().hex,
+ certfile=uuid.uuid4().hex)
+
+ for path in [self.CA_PATH, self.CERT_PATH]:
+ self.request(app=self.public_app,
+ method='GET',
+ path=path,
+ expected_status=500)
diff --git a/keystone-moon/keystone/tests/unit/test_driver_hints.py b/keystone-moon/keystone/tests/unit/test_driver_hints.py
new file mode 100644
index 00000000..c20d2ae7
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_driver_hints.py
@@ -0,0 +1,60 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import driver_hints
+from keystone.tests.unit import core as test
+
+
+class ListHintsTests(test.TestCase):
+
+ def test_create_iterate_satisfy(self):
+ hints = driver_hints.Hints()
+ hints.add_filter('t1', 'data1')
+ hints.add_filter('t2', 'data2')
+ self.assertEqual(2, len(hints.filters))
+ filter = hints.get_exact_filter_by_name('t1')
+ self.assertEqual('t1', filter['name'])
+ self.assertEqual('data1', filter['value'])
+ self.assertEqual('equals', filter['comparator'])
+ self.assertEqual(False, filter['case_sensitive'])
+
+ hints.filters.remove(filter)
+ filter_count = 0
+ for filter in hints.filters:
+ filter_count += 1
+ self.assertEqual('t2', filter['name'])
+ self.assertEqual(1, filter_count)
+
+ def test_multiple_creates(self):
+ hints = driver_hints.Hints()
+ hints.add_filter('t1', 'data1')
+ hints.add_filter('t2', 'data2')
+ self.assertEqual(2, len(hints.filters))
+ hints2 = driver_hints.Hints()
+ hints2.add_filter('t4', 'data1')
+ hints2.add_filter('t5', 'data2')
+ self.assertEqual(2, len(hints.filters))
+
+ def test_limits(self):
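+ # set_limit() replaces any previously stored limit; the 'truncated'
+ # flag marks whether the result list was cut off at that limit.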
+ hints = driver_hints.Hints()
+ self.assertIsNone(hints.limit)
+ hints.set_limit(10)
+ self.assertEqual(10, hints.limit['limit'])
+ self.assertFalse(hints.limit['truncated'])
+ hints.set_limit(11)
+ self.assertEqual(11, hints.limit['limit'])
+ self.assertFalse(hints.limit['truncated'])
+ hints.set_limit(10, truncated=True)
+ self.assertEqual(10, hints.limit['limit'])
+ self.assertTrue(hints.limit['truncated'])
diff --git a/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py b/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py
new file mode 100644
index 00000000..03c95e27
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py
@@ -0,0 +1,34 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystonemiddleware import ec2_token as ksm_ec2_token
+
+from keystone.middleware import ec2_token
+from keystone.tests import unit as tests
+
+
+class EC2TokenMiddlewareTestBase(tests.BaseTestCase):
+ def test_symbols(self):
+ """Verify ec2 middleware symbols.
+
+ Verify that the keystone version of ec2_token middleware forwards the
+ public symbols from the keystonemiddleware version of the ec2_token
+ middleware for backwards compatibility.
+
+ """
+
+ self.assertIs(ksm_ec2_token.app_factory, ec2_token.app_factory)
+ self.assertIs(ksm_ec2_token.filter_factory, ec2_token.filter_factory)
+ self.assertTrue(
+ issubclass(ec2_token.EC2Token, ksm_ec2_token.EC2Token),
+ 'ec2_token.EC2Token is not subclass of '
+ 'keystonemiddleware.ec2_token.EC2Token')
diff --git a/keystone-moon/keystone/tests/unit/test_exception.py b/keystone-moon/keystone/tests/unit/test_exception.py
new file mode 100644
index 00000000..f91fa2a7
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_exception.py
@@ -0,0 +1,227 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslo_serialization import jsonutils
+import six
+
+from keystone.common import wsgi
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+class ExceptionTestCase(tests.BaseTestCase):
+ def assertValidJsonRendering(self, e):
+ resp = wsgi.render_exception(e)
+ self.assertEqual(e.code, resp.status_int)
+ self.assertEqual('%s %s' % (e.code, e.title), resp.status)
+
+ j = jsonutils.loads(resp.body)
+ self.assertIsNotNone(j.get('error'))
+ self.assertIsNotNone(j['error'].get('code'))
+ self.assertIsNotNone(j['error'].get('title'))
+ self.assertIsNotNone(j['error'].get('message'))
+ self.assertNotIn('\n', j['error']['message'])
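+ # Two consecutive spaces would suggest a message clumsily assembled
+ # from a multi-line docstring, so the rendered message must be cleanly
+ # joined (checked on the next line).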
+ self.assertNotIn('  ', j['error']['message'])
+ self.assertTrue(type(j['error']['code']) is int)
+
+ def test_all_json_renderings(self):
+ """Everything callable in the exception module should be renderable.
+
+ ... except for the base error class (exception.Error), which is not
+ user-facing.
+
+ This test provides a custom message to bypass docstring parsing, which
+ should be tested separately.
+
+ """
+ for cls in [x for x in exception.__dict__.values() if callable(x)]:
+ if cls is not exception.Error and isinstance(cls, exception.Error):
+ self.assertValidJsonRendering(cls(message='Overridden.'))
+
+ def test_validation_error(self):
+ target = uuid.uuid4().hex
+ attribute = uuid.uuid4().hex
+ e = exception.ValidationError(target=target, attribute=attribute)
+ self.assertValidJsonRendering(e)
+ self.assertIn(target, six.text_type(e))
+ self.assertIn(attribute, six.text_type(e))
+
+ def test_not_found(self):
+ target = uuid.uuid4().hex
+ e = exception.NotFound(target=target)
+ self.assertValidJsonRendering(e)
+ self.assertIn(target, six.text_type(e))
+
+ def test_403_title(self):
+ e = exception.Forbidden()
+ resp = wsgi.render_exception(e)
+ j = jsonutils.loads(resp.body)
+ self.assertEqual('Forbidden', e.title)
+ self.assertEqual('Forbidden', j['error'].get('title'))
+
+ def test_unicode_message(self):
+ message = u'Comment \xe7a va'
+ e = exception.Error(message)
+
+ try:
+ self.assertEqual(message, six.text_type(e))
+ except UnicodeEncodeError:
+ self.fail("unicode error message not supported")
+
+ def test_unicode_string(self):
+ e = exception.ValidationError(attribute='xx',
+ target='Long \xe2\x80\x93 Dash')
+
+ self.assertIn(u'\u2013', six.text_type(e))
+
+ def test_invalid_unicode_string(self):
+ # NOTE(jamielennox): This is a complete failure case so what is
+ # returned in the exception message is not that important so long
+ # as there is an error with a message
+ e = exception.ValidationError(attribute='xx',
+ target='\xe7a va')
+ self.assertIn('%(attribute)', six.text_type(e))
+
+
+class UnexpectedExceptionTestCase(ExceptionTestCase):
+ """Tests if internal info is exposed to the API user on UnexpectedError."""
+
+ class SubClassExc(exception.UnexpectedError):
+ debug_message_format = 'Debug Message: %(debug_info)s'
+
+ def setUp(self):
+ super(UnexpectedExceptionTestCase, self).setUp()
+ self.exc_str = uuid.uuid4().hex
+ self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
+
+ def test_unexpected_error_no_debug(self):
+ self.config_fixture.config(debug=False)
+ e = exception.UnexpectedError(exception=self.exc_str)
+ self.assertNotIn(self.exc_str, six.text_type(e))
+
+ def test_unexpected_error_debug(self):
+ self.config_fixture.config(debug=True)
+ e = exception.UnexpectedError(exception=self.exc_str)
+ self.assertIn(self.exc_str, six.text_type(e))
+
+ def test_unexpected_error_subclass_no_debug(self):
+ self.config_fixture.config(debug=False)
+ e = UnexpectedExceptionTestCase.SubClassExc(
+ debug_info=self.exc_str)
+ self.assertEqual(exception.UnexpectedError._message_format,
+ six.text_type(e))
+
+ def test_unexpected_error_subclass_debug(self):
+ self.config_fixture.config(debug=True)
+ subclass = self.SubClassExc
+
+ e = subclass(debug_info=self.exc_str)
+ expected = subclass.debug_message_format % {'debug_info': self.exc_str}
+ translated_amendment = six.text_type(exception.SecurityError.amendment)
+ self.assertEqual(
+ expected + six.text_type(' ') + translated_amendment,
+ six.text_type(e))
+
+ def test_unexpected_error_custom_message_no_debug(self):
+ self.config_fixture.config(debug=False)
+ e = exception.UnexpectedError(self.exc_str)
+ self.assertEqual(exception.UnexpectedError._message_format,
+ six.text_type(e))
+
+ def test_unexpected_error_custom_message_debug(self):
+ self.config_fixture.config(debug=True)
+ e = exception.UnexpectedError(self.exc_str)
+ translated_amendment = six.text_type(exception.SecurityError.amendment)
+ self.assertEqual(
+ self.exc_str + six.text_type(' ') + translated_amendment,
+ six.text_type(e))
+
+
+class SecurityErrorTestCase(ExceptionTestCase):
+ """Tests whether security-related info is exposed to the API user."""
+
+ def setUp(self):
+ super(SecurityErrorTestCase, self).setUp()
+ self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
+
+ def test_unauthorized_exposure(self):
+ self.config_fixture.config(debug=False)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Unauthorized(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, six.text_type(e))
+
+ def test_unauthorized_exposure_in_debug(self):
+ self.config_fixture.config(debug=True)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Unauthorized(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, six.text_type(e))
+
+ def test_forbidden_exposure(self):
+ self.config_fixture.config(debug=False)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Forbidden(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, six.text_type(e))
+
+ def test_forbidden_exposure_in_debug(self):
+ self.config_fixture.config(debug=True)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Forbidden(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, six.text_type(e))
+
+ def test_forbidden_action_exposure(self):
+ self.config_fixture.config(debug=False)
+
+ risky_info = uuid.uuid4().hex
+ action = uuid.uuid4().hex
+ e = exception.ForbiddenAction(message=risky_info, action=action)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, six.text_type(e))
+ self.assertIn(action, six.text_type(e))
+
+ e = exception.ForbiddenAction(action=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, six.text_type(e))
+
+ def test_forbidden_action_exposure_in_debug(self):
+ self.config_fixture.config(debug=True)
+
+ risky_info = uuid.uuid4().hex
+
+ e = exception.ForbiddenAction(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, six.text_type(e))
+
+ e = exception.ForbiddenAction(action=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, six.text_type(e))
+
+ def test_unicode_argument_message(self):
+ self.config_fixture.config(debug=False)
+
+ risky_info = u'\u7ee7\u7eed\u884c\u7f29\u8fdb\u6216'
+ e = exception.Forbidden(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, six.text_type(e))
diff --git a/keystone-moon/keystone/tests/unit/test_hacking_checks.py b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
new file mode 100644
index 00000000..b9b047b3
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
@@ -0,0 +1,143 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import textwrap
+
+import mock
+import pep8
+import testtools
+
+from keystone.hacking import checks
+from keystone.tests.unit.ksfixtures import hacking as hacking_fixtures
+
+
+class BaseStyleCheck(testtools.TestCase):
+
+ def setUp(self):
+ super(BaseStyleCheck, self).setUp()
+ self.code_ex = self.useFixture(self.get_fixture())
+ self.addCleanup(delattr, self, 'code_ex')
+
+ def get_checker(self):
+ """Returns the checker to be used for tests in this class."""
+ raise NotImplementedError('subclasses must provide a real implementation')
+
+ def get_fixture(self):
+ return hacking_fixtures.HackingCode()
+
+ # We are patching pep8 so that only the check under test is actually
+ # installed.
+ @mock.patch('pep8._checks',
+ {'physical_line': {}, 'logical_line': {}, 'tree': {}})
+ def run_check(self, code):
+ pep8.register_check(self.get_checker())
+
+ lines = textwrap.dedent(code).strip().splitlines(True)
+
+ checker = pep8.Checker(lines=lines)
+ checker.check_all()
+ checker.report._deferred_print.sort()
+ return checker.report._deferred_print
+
+ def assert_has_errors(self, code, expected_errors=None):
+ actual_errors = [e[:3] for e in self.run_check(code)]
+ self.assertEqual(expected_errors or [], actual_errors)
+
+
+class TestCheckForMutableDefaultArgs(BaseStyleCheck):
+
+ def get_checker(self):
+ return checks.CheckForMutableDefaultArgs
+
+ def test(self):
+ code = self.code_ex.mutable_default_args['code']
+ errors = self.code_ex.mutable_default_args['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
+
+
+class TestBlockCommentsBeginWithASpace(BaseStyleCheck):
+
+ def get_checker(self):
+ return checks.block_comments_begin_with_a_space
+
+ def test(self):
+ code = self.code_ex.comments_begin_with_space['code']
+ errors = self.code_ex.comments_begin_with_space['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
+
+
+class TestAssertingNoneEquality(BaseStyleCheck):
+
+ def get_checker(self):
+ return checks.CheckForAssertingNoneEquality
+
+ def test(self):
+ code = self.code_ex.asserting_none_equality['code']
+ errors = self.code_ex.asserting_none_equality['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
+
+
+class TestCheckForDebugLoggingIssues(BaseStyleCheck):
+
+ def get_checker(self):
+ return checks.CheckForLoggingIssues
+
+ def test_for_translations(self):
+ fixture = self.code_ex.assert_no_translations_for_debug_logging
+ code = fixture['code']
+ errors = fixture['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
+
+
+class TestCheckForNonDebugLoggingIssues(BaseStyleCheck):
+
+ def get_checker(self):
+ return checks.CheckForLoggingIssues
+
+ def get_fixture(self):
+ return hacking_fixtures.HackingLogging()
+
+ def test_for_translations(self):
+ for example in self.code_ex.examples:
+ code = self.code_ex.shared_imports + example['code']
+ errors = example['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
+
+ def assert_has_errors(self, code, expected_errors=None):
+ # pull out the parts of the error that we'll match against
+ actual_errors = (e[:3] for e in self.run_check(code))
+ # adjust line numbers to make the fixture data more readable.
+ import_lines = len(self.code_ex.shared_imports.split('\n')) - 1
+ actual_errors = [(e[0] - import_lines, e[1], e[2])
+ for e in actual_errors]
+ self.assertEqual(expected_errors or [], actual_errors)
+
+
+class TestCheckOsloNamespaceImports(BaseStyleCheck):
+ def get_checker(self):
+ return checks.check_oslo_namespace_imports
+
+ def test(self):
+ code = self.code_ex.oslo_namespace_imports['code']
+ errors = self.code_ex.oslo_namespace_imports['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
+
+
+class TestDictConstructorWithSequenceCopy(BaseStyleCheck):
+
+ def get_checker(self):
+ return checks.dict_constructor_with_sequence_copy
+
+ def test(self):
+ code = self.code_ex.dict_constructor['code']
+ errors = self.code_ex.dict_constructor['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
diff --git a/keystone-moon/keystone/tests/unit/test_ipv6.py b/keystone-moon/keystone/tests/unit/test_ipv6.py
new file mode 100644
index 00000000..e3d467fb
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_ipv6.py
@@ -0,0 +1,51 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from oslo_config import cfg
+
+from keystone.common import environment
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import appserver
+
+
+CONF = cfg.CONF
+
+
+class IPv6TestCase(tests.TestCase):
+
+ def setUp(self):
+ self.skip_if_no_ipv6()
+ super(IPv6TestCase, self).setUp()
+ self.load_backends()
+
+ def test_ipv6_ok(self):
+ """Make sure both public and admin API work with ipv6."""
+ paste_conf = self._paste_config('keystone')
+
+ # Verify Admin
+ with appserver.AppServer(paste_conf, appserver.ADMIN, host="::1"):
+ conn = environment.httplib.HTTPConnection(
+ '::1', CONF.eventlet_server.admin_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ # Verify Public
+ with appserver.AppServer(paste_conf, appserver.MAIN, host="::1"):
+ conn = environment.httplib.HTTPConnection(
+ '::1', CONF.eventlet_server.public_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
diff --git a/keystone-moon/keystone/tests/unit/test_kvs.py b/keystone-moon/keystone/tests/unit/test_kvs.py
new file mode 100644
index 00000000..4d80ea33
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_kvs.py
@@ -0,0 +1,581 @@
+# Copyright 2013 Metacloud, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+import uuid
+
+from dogpile.cache import api
+from dogpile.cache import proxy
+from dogpile.cache import util
+import mock
+import six
+from testtools import matchers
+
+from keystone.common.kvs.backends import inmemdb
+from keystone.common.kvs.backends import memcached
+from keystone.common.kvs import core
+from keystone import exception
+from keystone.tests import unit as tests
+
+NO_VALUE = api.NO_VALUE
+
+
+class MutexFixture(object):
+ def __init__(self, storage_dict, key, timeout):
+ self.database = storage_dict
+ self.key = '_lock' + key
+
+ def acquire(self, wait=True):
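+ # The backing InmemTestDB raises KeyError on duplicate insertion, so a
+ # plain assignment below acts as an atomic test-and-set for the lock.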
+ while True:
+ try:
+ self.database[self.key] = 1
+ return True
+ except KeyError:
+ return False
+
+ def release(self):
+ self.database.pop(self.key, None)
+
+
+class KVSBackendFixture(inmemdb.MemoryBackend):
+ def __init__(self, arguments):
+ class InmemTestDB(dict):
+ def __setitem__(self, key, value):
+ if key in self:
+ raise KeyError('Key %s already exists' % key)
+ super(InmemTestDB, self).__setitem__(key, value)
+
+ self._db = InmemTestDB()
+ self.lock_timeout = arguments.pop('lock_timeout', 5)
+ self.test_arg = arguments.pop('test_arg', None)
+
+ def get_mutex(self, key):
+ return MutexFixture(self._db, key, self.lock_timeout)
+
+ @classmethod
+ def key_mangler(cls, key):
+ return 'KVSBackend_' + key
+
+
+class KVSBackendForcedKeyMangleFixture(KVSBackendFixture):
+ use_backend_key_mangler = True
+
+ @classmethod
+ def key_mangler(cls, key):
+ return 'KVSBackendForcedKeyMangle_' + key
+
+
+class RegionProxyFixture(proxy.ProxyBackend):
+ """A test dogpile.cache proxy that does nothing."""
+
+
+class RegionProxy2Fixture(proxy.ProxyBackend):
+ """A test dogpile.cache proxy that does nothing."""
+
+
+class TestMemcacheDriver(api.CacheBackend):
+ """A test dogpile.cache backend that conforms to the mixin-mechanism for
+ overriding set and set_multi methods on dogpile memcached drivers.
+ """
+ class test_client(object):
+ # FIXME(morganfainberg): Convert this test client over to using mock
+ # and/or mock.MagicMock as appropriate
+
+ def __init__(self):
+ self.__name__ = 'TestingMemcacheDriverClientObject'
+ self.set_arguments_passed = None
+ self.keys_values = {}
+ self.lock_set_time = None
+ self.lock_expiry = None
+
+ def set(self, key, value, **set_arguments):
+ self.keys_values.clear()
+ self.keys_values[key] = value
+ self.set_arguments_passed = set_arguments
+
+ def set_multi(self, mapping, **set_arguments):
+ self.keys_values.clear()
+ self.keys_values = mapping
+ self.set_arguments_passed = set_arguments
+
+ def add(self, key, value, expiry_time):
+ # NOTE(morganfainberg): `add` is used in this case for the
+ # memcache lock testing. If further testing is required around the
+ # actual memcache `add` interface, this method should be
+ # expanded to work more like the actual memcache `add` function
+ if self.lock_expiry is not None and self.lock_set_time is not None:
+ if time.time() - self.lock_set_time < self.lock_expiry:
+ return False
+ self.lock_expiry = expiry_time
+ self.lock_set_time = time.time()
+ return True
+
+ def delete(self, key):
+ # NOTE(morganfainberg): `delete` is used in this case for the
+ # memcache lock testing. If further testing is required around the
+ # actual memcache `delete` interface, this method should be
+ # expanded to work more like the actual memcache `delete` function.
+ self.lock_expiry = None
+ self.lock_set_time = None
+ return True
+
+ def __init__(self, arguments):
+ self.client = self.test_client()
+ self.set_arguments = {}
+ # NOTE(morganfainberg): This is the same logic as the dogpile backend
+ # since we need to mirror that functionality for the `set_argument`
+ # values to appear on the actual backend.
+ if 'memcached_expire_time' in arguments:
+ self.set_arguments['time'] = arguments['memcached_expire_time']
+
+ def set(self, key, value):
+ self.client.set(key, value, **self.set_arguments)
+
+ def set_multi(self, mapping):
+ self.client.set_multi(mapping, **self.set_arguments)
+
+
+class KVSTest(tests.TestCase):
+ def setUp(self):
+ super(KVSTest, self).setUp()
+ self.key_foo = 'foo_' + uuid.uuid4().hex
+ self.value_foo = uuid.uuid4().hex
+ self.key_bar = 'bar_' + uuid.uuid4().hex
+ self.value_bar = {'complex_data_structure': uuid.uuid4().hex}
+ self.addCleanup(memcached.VALID_DOGPILE_BACKENDS.pop,
+ 'TestDriver',
+ None)
+ memcached.VALID_DOGPILE_BACKENDS['TestDriver'] = TestMemcacheDriver
+
+ def _get_kvs_region(self, name=None):
+ if name is None:
+ name = uuid.uuid4().hex
+ return core.get_key_value_store(name)
+
+ def test_kvs_basic_configuration(self):
+ # Test that the most basic configuration options pass through to the
+ # backend.
+ region_one = uuid.uuid4().hex
+ region_two = uuid.uuid4().hex
+ test_arg = 100
+ kvs = self._get_kvs_region(region_one)
+ kvs.configure('openstack.kvs.Memory')
+
+ self.assertIsInstance(kvs._region.backend, inmemdb.MemoryBackend)
+ self.assertEqual(region_one, kvs._region.name)
+
+ kvs = self._get_kvs_region(region_two)
+ kvs.configure('openstack.kvs.KVSBackendFixture',
+ test_arg=test_arg)
+
+ self.assertEqual(region_two, kvs._region.name)
+ self.assertEqual(test_arg, kvs._region.backend.test_arg)
+
+ def test_kvs_proxy_configuration(self):
+ # Test that proxies are applied correctly and in the correct (reverse)
+ # order to the kvs region.
+ kvs = self._get_kvs_region()
+ kvs.configure(
+ 'openstack.kvs.Memory',
+ proxy_list=['keystone.tests.unit.test_kvs.RegionProxyFixture',
+ 'keystone.tests.unit.test_kvs.RegionProxy2Fixture'])
+
+ self.assertIsInstance(kvs._region.backend, RegionProxyFixture)
+ self.assertIsInstance(kvs._region.backend.proxied, RegionProxy2Fixture)
+ self.assertIsInstance(kvs._region.backend.proxied.proxied,
+ inmemdb.MemoryBackend)
+
+ def test_kvs_key_mangler_fallthrough_default(self):
+ # Test to make sure we default to the standard dogpile sha1 hashing
+ # key_mangler
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+
+ self.assertIs(kvs._region.key_mangler, util.sha1_mangle_key)
+ # The backend should also have the keymangler set the same as the
+ # region now.
+ self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+
+ def test_kvs_key_mangler_configuration_backend(self):
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.KVSBackendFixture')
+ expected = KVSBackendFixture.key_mangler(self.key_foo)
+ self.assertEqual(expected, kvs._region.key_mangler(self.key_foo))
+
+ def test_kvs_key_mangler_configuration_forced_backend(self):
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.KVSBackendForcedKeyMangleFixture',
+ key_mangler=util.sha1_mangle_key)
+ expected = KVSBackendForcedKeyMangleFixture.key_mangler(self.key_foo)
+ self.assertEqual(expected, kvs._region.key_mangler(self.key_foo))
+
+ def test_kvs_key_mangler_configuration_disabled(self):
+ # Test that no key_mangler is set if enable_key_mangler is false
+ self.config_fixture.config(group='kvs', enable_key_mangler=False)
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+
+ self.assertIsNone(kvs._region.key_mangler)
+ self.assertIsNone(kvs._region.backend.key_mangler)
+
+ def test_kvs_key_mangler_set_on_backend(self):
+ def test_key_mangler(key):
+ return key
+
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+ self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ kvs._set_key_mangler(test_key_mangler)
+ self.assertIs(kvs._region.backend.key_mangler, test_key_mangler)
+
+ def test_kvs_basic_get_set_delete(self):
+ # Test the basic get/set/delete actions on the KVS region
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+
+ # NotFound should be raised if the key doesn't exist
+ self.assertRaises(exception.NotFound, kvs.get, key=self.key_bar)
+ kvs.set(self.key_bar, self.value_bar)
+ returned_value = kvs.get(self.key_bar)
+ # The returned value should be the same value as the value in .set
+ self.assertEqual(self.value_bar, returned_value)
+ # The value should not be the exact object used in .set
+ self.assertIsNot(returned_value, self.value_bar)
+ kvs.delete(self.key_bar)
+ # Second delete should raise NotFound
+ self.assertRaises(exception.NotFound, kvs.delete, key=self.key_bar)
+
+ def _kvs_multi_get_set_delete(self, kvs):
+ keys = [self.key_foo, self.key_bar]
+ expected = [self.value_foo, self.value_bar]
+
+ kvs.set_multi({self.key_foo: self.value_foo,
+ self.key_bar: self.value_bar})
+ # Returned value from get_multi should be a list of the values of the
+ # keys
+ self.assertEqual(expected, kvs.get_multi(keys))
+ # Delete both keys
+ kvs.delete_multi(keys)
+ # Make sure that NotFound is properly raised when trying to get the
+ # now-deleted keys.
+ self.assertRaises(exception.NotFound, kvs.get_multi, keys=keys)
+ self.assertRaises(exception.NotFound, kvs.get, key=self.key_foo)
+ self.assertRaises(exception.NotFound, kvs.get, key=self.key_bar)
+ # Make sure get_multi raises NotFound if one of the keys isn't found
+ kvs.set(self.key_foo, self.value_foo)
+ self.assertRaises(exception.NotFound, kvs.get_multi, keys=keys)
+
+ def test_kvs_multi_get_set_delete(self):
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+
+ self._kvs_multi_get_set_delete(kvs)
+
+ def test_kvs_locking_context_handler(self):
+ # Make sure we're creating the correct key/value pairs for the backend
+ # distributed locking mutex.
+ self.config_fixture.config(group='kvs', enable_key_mangler=False)
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.KVSBackendFixture')
+
+ lock_key = '_lock' + self.key_foo
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+ with core.KeyValueStoreLock(kvs._mutex(self.key_foo), self.key_foo):
+ self.assertIn(lock_key, kvs._region.backend._db)
+ self.assertIs(kvs._region.backend._db[lock_key], 1)
+
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+
+ def test_kvs_locking_context_handler_locking_disabled(self):
+ # Make sure no creation of key/value pairs for the backend
+ # distributed locking mutex occurs if locking is disabled.
+ self.config_fixture.config(group='kvs', enable_key_mangler=False)
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.KVSBackendFixture', locking=False)
+ lock_key = '_lock' + self.key_foo
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+ with core.KeyValueStoreLock(kvs._mutex(self.key_foo), self.key_foo,
+ False):
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+
+ def test_kvs_with_lock_action_context_manager_timeout(self):
+ kvs = self._get_kvs_region()
+ lock_timeout = 5
+ kvs.configure('openstack.kvs.Memory', lock_timeout=lock_timeout)
+
+ def do_with_lock_action_timeout(kvs_region, key, offset):
+ with kvs_region.get_lock(key) as lock_in_use:
+ self.assertTrue(lock_in_use.active)
+ # Subtract the offset from the acquire_time. If this puts the
+ # acquire_time difference from time.time() at >= lock_timeout
+ # this should raise a LockTimeout exception. This is because
+ # there is a built-in 1-second overlap where the context
+ # manager thinks the lock is expired but the lock is still
+ # active. This is to help mitigate race conditions on the
+ # time-check itself.
+ lock_in_use.acquire_time -= offset
+ with kvs_region._action_with_lock(key, lock_in_use):
+ pass
+
+ # This should succeed, we are not timed-out here.
+ do_with_lock_action_timeout(kvs, key=uuid.uuid4().hex, offset=2)
+ # Try it now with an offset equal to the lock_timeout
+ self.assertRaises(core.LockTimeout,
+ do_with_lock_action_timeout,
+ kvs_region=kvs,
+ key=uuid.uuid4().hex,
+ offset=lock_timeout)
+ # Final test with offset significantly greater than the lock_timeout
+ self.assertRaises(core.LockTimeout,
+ do_with_lock_action_timeout,
+ kvs_region=kvs,
+ key=uuid.uuid4().hex,
+ offset=100)
+
+ def test_kvs_with_lock_action_mismatched_keys(self):
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+
+ def do_with_lock_action(kvs_region, lock_key, target_key):
+ with kvs_region.get_lock(lock_key) as lock_in_use:
+ self.assertTrue(lock_in_use.active)
+ with kvs_region._action_with_lock(target_key, lock_in_use):
+ pass
+
+ # Ensure we raise a ValueError if the lock key mismatches from the
+ # target key.
+ self.assertRaises(ValueError,
+ do_with_lock_action,
+ kvs_region=kvs,
+ lock_key=self.key_foo,
+ target_key=self.key_bar)
+
+ def test_kvs_with_lock_action_context_manager(self):
+ # Make sure we're creating the correct key/value pairs for the backend
+ # distributed locking mutex.
+ self.config_fixture.config(group='kvs', enable_key_mangler=False)
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.KVSBackendFixture')
+
+ lock_key = '_lock' + self.key_foo
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+ with kvs.get_lock(self.key_foo) as lock:
+ with kvs._action_with_lock(self.key_foo, lock):
+ self.assertTrue(lock.active)
+ self.assertIn(lock_key, kvs._region.backend._db)
+ self.assertIs(kvs._region.backend._db[lock_key], 1)
+
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+
+ def test_kvs_with_lock_action_context_manager_no_lock(self):
+ # Make sure we're not locking unless an actual lock is passed into the
+ # context manager
+ self.config_fixture.config(group='kvs', enable_key_mangler=False)
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.KVSBackendFixture')
+
+ lock_key = '_lock' + self.key_foo
+ lock = None
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+ with kvs._action_with_lock(self.key_foo, lock):
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+
+ self.assertNotIn(lock_key, kvs._region.backend._db)
+
+ def test_kvs_backend_registration_does_not_reregister_backends(self):
+ # SetUp registers the test backends. Running this again would raise an
+ # exception if re-registration of the backends occurred.
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memory')
+ core._register_backends()
+
+ def test_kvs_memcached_manager_valid_dogpile_memcached_backend(self):
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memcached',
+ memcached_backend='TestDriver')
+ self.assertIsInstance(kvs._region.backend.driver,
+ TestMemcacheDriver)
+
+ def test_kvs_memcached_manager_invalid_dogpile_memcached_backend(self):
+ # Invalid dogpile memcache backend should raise ValueError
+ kvs = self._get_kvs_region()
+ self.assertRaises(ValueError,
+ kvs.configure,
+ backing_store='openstack.kvs.Memcached',
+ memcached_backend=uuid.uuid4().hex)
+
+ def test_kvs_memcache_manager_no_expiry_keys(self):
+ # Make sure the memcache backend recalculates the no-expiry keys
+ # correctly when a key-mangler is set on it.
+
+ def new_mangler(key):
+ return '_mangled_key_' + key
+
+ kvs = self._get_kvs_region()
+ no_expiry_keys = set(['test_key'])
+ kvs.configure('openstack.kvs.Memcached',
+ memcached_backend='TestDriver',
+ no_expiry_keys=no_expiry_keys)
+ calculated_keys = set([kvs._region.key_mangler(key)
+ for key in no_expiry_keys])
+ self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ self.assertSetEqual(calculated_keys,
+ kvs._region.backend.no_expiry_hashed_keys)
+ self.assertSetEqual(no_expiry_keys,
+ kvs._region.backend.raw_no_expiry_keys)
+ calculated_keys = set([new_mangler(key) for key in no_expiry_keys])
+ kvs._region.backend.key_mangler = new_mangler
+ self.assertSetEqual(calculated_keys,
+ kvs._region.backend.no_expiry_hashed_keys)
+ self.assertSetEqual(no_expiry_keys,
+ kvs._region.backend.raw_no_expiry_keys)
+
+ def test_kvs_memcache_key_mangler_set_to_none(self):
+ kvs = self._get_kvs_region()
+ no_expiry_keys = set(['test_key'])
+ kvs.configure('openstack.kvs.Memcached',
+ memcached_backend='TestDriver',
+ no_expiry_keys=no_expiry_keys)
+ self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ kvs._region.backend.key_mangler = None
+ self.assertSetEqual(kvs._region.backend.raw_no_expiry_keys,
+ kvs._region.backend.no_expiry_hashed_keys)
+ self.assertIsNone(kvs._region.backend.key_mangler)
+
+ def test_noncallable_key_mangler_set_on_driver_raises_type_error(self):
+ kvs = self._get_kvs_region()
+ kvs.configure('openstack.kvs.Memcached',
+ memcached_backend='TestDriver')
+ self.assertRaises(TypeError,
+ setattr,
+ kvs._region.backend,
+ 'key_mangler',
+ 'Non-Callable')
+
+ def test_kvs_memcache_set_arguments_and_memcache_expires_ttl(self):
+ # Test the "set_arguments" (arguments passed on all set calls) logic
+ # and the no-expiry-key modifications of set_arguments for the explicit
+ # memcache TTL.
+ self.config_fixture.config(group='kvs', enable_key_mangler=False)
+ kvs = self._get_kvs_region()
+ memcache_expire_time = 86400
+
+ expected_set_args = {'time': memcache_expire_time}
+ expected_no_expiry_args = {}
+
+ expected_foo_keys = [self.key_foo]
+ expected_bar_keys = [self.key_bar]
+
+ mapping_foo = {self.key_foo: self.value_foo}
+ mapping_bar = {self.key_bar: self.value_bar}
+
+ kvs.configure(backing_store='openstack.kvs.Memcached',
+ memcached_backend='TestDriver',
+ memcached_expire_time=memcache_expire_time,
+ some_other_arg=uuid.uuid4().hex,
+ no_expiry_keys=[self.key_bar])
+ # Ensure the set_arguments are correct
+ self.assertDictEqual(
+ kvs._region.backend._get_set_arguments_driver_attr(),
+ expected_set_args)
+
+ # Set a key that would have an expiry and verify the correct result
+ # occurred and that the correct set_arguments were passed.
+ kvs.set(self.key_foo, self.value_foo)
+ self.assertDictEqual(
+ kvs._region.backend.driver.client.set_arguments_passed,
+ expected_set_args)
+ self.assertEqual(expected_foo_keys,
+ kvs._region.backend.driver.client.keys_values.keys())
+ self.assertEqual(
+ self.value_foo,
+ kvs._region.backend.driver.client.keys_values[self.key_foo][0])
+
+ # Set a key that would not have an expiry and verify the correct result
+ # occurred and that the correct set_arguments were passed.
+ kvs.set(self.key_bar, self.value_bar)
+ self.assertDictEqual(
+ kvs._region.backend.driver.client.set_arguments_passed,
+ expected_no_expiry_args)
+ self.assertEqual(expected_bar_keys,
+ kvs._region.backend.driver.client.keys_values.keys())
+ self.assertEqual(
+ self.value_bar,
+ kvs._region.backend.driver.client.keys_values[self.key_bar][0])
+
+ # set_multi a dict that would have an expiry and verify the correct
+ # result occurred and that the correct set_arguments were passed.
+ kvs.set_multi(mapping_foo)
+ self.assertDictEqual(
+ kvs._region.backend.driver.client.set_arguments_passed,
+ expected_set_args)
+ self.assertEqual(expected_foo_keys,
+ kvs._region.backend.driver.client.keys_values.keys())
+ self.assertEqual(
+ self.value_foo,
+ kvs._region.backend.driver.client.keys_values[self.key_foo][0])
+
+ # set_multi a dict that would not have an expiry and verify the correct
+ # result occurred and that the correct set_arguments were passed.
+ kvs.set_multi(mapping_bar)
+ self.assertDictEqual(
+ kvs._region.backend.driver.client.set_arguments_passed,
+ expected_no_expiry_args)
+ self.assertEqual(expected_bar_keys,
+ kvs._region.backend.driver.client.keys_values.keys())
+ self.assertEqual(
+ self.value_bar,
+ kvs._region.backend.driver.client.keys_values[self.key_bar][0])
+
+ def test_memcached_lock_max_lock_attempts(self):
+ kvs = self._get_kvs_region()
+ max_lock_attempts = 1
+ test_key = uuid.uuid4().hex
+
+ kvs.configure(backing_store='openstack.kvs.Memcached',
+ memcached_backend='TestDriver',
+ max_lock_attempts=max_lock_attempts)
+
+ self.assertEqual(max_lock_attempts,
+ kvs._region.backend.max_lock_attempts)
+ # Simple Lock success test
+ with kvs.get_lock(test_key) as lock:
+ kvs.set(test_key, 'testing', lock)
+
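+ # With max_lock_attempts=1, re-acquiring a lock on a key that is
+ # already held should fail and surface as UnexpectedError.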
+ def lock_within_a_lock(key):
+ with kvs.get_lock(key) as first_lock:
+ kvs.set(test_key, 'lock', first_lock)
+ with kvs.get_lock(key) as second_lock:
+ kvs.set(key, 'lock-within-a-lock', second_lock)
+
+ self.assertRaises(exception.UnexpectedError,
+ lock_within_a_lock,
+ key=test_key)
+
+
+class TestMemcachedBackend(tests.TestCase):
+
+ @mock.patch('keystone.common.kvs.backends.memcached._', six.text_type)
+ def test_invalid_backend_fails_initialization(self):
+ raises_valueerror = matchers.Raises(matchers.MatchesException(
+ ValueError, r'.*FakeBackend.*'))
+
+ options = {
+ 'url': 'needed to get to the focus of this test (the backend)',
+ 'memcached_backend': 'FakeBackend',
+ }
+ self.assertThat(lambda: memcached.MemcachedBackend(options),
+ raises_valueerror)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
new file mode 100644
index 00000000..5b449362
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
@@ -0,0 +1,229 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import subprocess
+import uuid
+
+import ldap
+import ldap.modlist
+from oslo_config import cfg
+
+from keystone import exception
+from keystone.identity.backends import ldap as identity_ldap
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_backend_ldap
+
+
+CONF = cfg.CONF
+
+
+def create_object(dn, attrs):
+ conn = ldap.initialize(CONF.ldap.url)
+ conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password)
+ ldif = ldap.modlist.addModlist(attrs)
+ conn.add_s(dn, ldif)
+ conn.unbind_s()
+
+
+class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
+
+ def setUp(self):
+ self._ldap_skip_live()
+ super(LiveLDAPIdentity, self).setUp()
+
+ def _ldap_skip_live(self):
+ self.skip_if_env_not_set('ENABLE_LDAP_LIVE_TEST')
+
+ def clear_database(self):
+        with open('/dev/null', 'w') as devnull:
+            subprocess.call(['ldapdelete',
+                             '-x',
+                             '-D', CONF.ldap.user,
+                             '-H', CONF.ldap.url,
+                             '-w', CONF.ldap.password,
+                             '-r', CONF.ldap.suffix],
+                            stderr=devnull)
+
+ if CONF.ldap.suffix.startswith('ou='):
+ tree_dn_attrs = {'objectclass': 'organizationalUnit',
+ 'ou': 'openstack'}
+ else:
+ tree_dn_attrs = {'objectclass': ['dcObject', 'organizationalUnit'],
+ 'dc': 'openstack',
+ 'ou': 'openstack'}
+ create_object(CONF.ldap.suffix, tree_dn_attrs)
+ create_object(CONF.ldap.user_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'Users'})
+ create_object(CONF.ldap.role_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'Roles'})
+ create_object(CONF.ldap.project_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'Projects'})
+ create_object(CONF.ldap.group_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'UserGroups'})
+
+ def config_files(self):
+ config_files = super(LiveLDAPIdentity, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_liveldap.conf'))
+ return config_files
+
+ def config_overrides(self):
+ super(LiveLDAPIdentity, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def test_build_tree(self):
+ """Regression test for building the tree names
+ """
+ # logic is different from the fake backend.
+ user_api = identity_ldap.UserApi(CONF)
+ self.assertTrue(user_api)
+ self.assertEqual(user_api.tree_dn, CONF.ldap.user_tree_dn)
+
+ def tearDown(self):
+ tests.TestCase.tearDown(self)
+
+ def test_ldap_dereferencing(self):
+ alt_users_ldif = {'objectclass': ['top', 'organizationalUnit'],
+ 'ou': 'alt_users'}
+ alt_fake_user_ldif = {'objectclass': ['person', 'inetOrgPerson'],
+ 'cn': 'alt_fake1',
+ 'sn': 'alt_fake1'}
+ aliased_users_ldif = {'objectclass': ['alias', 'extensibleObject'],
+ 'aliasedobjectname': "ou=alt_users,%s" %
+ CONF.ldap.suffix}
+ create_object("ou=alt_users,%s" % CONF.ldap.suffix, alt_users_ldif)
+ create_object("%s=alt_fake1,ou=alt_users,%s" %
+ (CONF.ldap.user_id_attribute, CONF.ldap.suffix),
+ alt_fake_user_ldif)
+ create_object("ou=alt_users,%s" % CONF.ldap.user_tree_dn,
+ aliased_users_ldif)
+
+ self.config_fixture.config(group='ldap',
+ query_scope='sub',
+ alias_dereferencing='never')
+ self.identity_api = identity_ldap.Identity()
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'alt_fake1')
+
+ self.config_fixture.config(group='ldap',
+ alias_dereferencing='searching')
+ self.identity_api = identity_ldap.Identity()
+ user_ref = self.identity_api.get_user('alt_fake1')
+ self.assertEqual('alt_fake1', user_ref['id'])
+
+ self.config_fixture.config(group='ldap', alias_dereferencing='always')
+ self.identity_api = identity_ldap.Identity()
+ user_ref = self.identity_api.get_user('alt_fake1')
+ self.assertEqual('alt_fake1', user_ref['id'])
+
+ # FakeLDAP does not correctly process filters, so this test can only be
+ # run against a live LDAP server
+ def test_list_groups_for_user_filtered(self):
+ domain = self._get_domain_fixture()
+ test_groups = []
+ test_users = []
+ GROUP_COUNT = 3
+ USER_COUNT = 2
+
+ for x in range(0, USER_COUNT):
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.identity_api.create_user(new_user)
+ test_users.append(new_user)
+ positive_user = test_users[0]
+ negative_user = test_users[1]
+
+ for x in range(0, USER_COUNT):
+ group_refs = self.identity_api.list_groups_for_user(
+ test_users[x]['id'])
+ self.assertEqual(0, len(group_refs))
+
+ for x in range(0, GROUP_COUNT):
+ new_group = {'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ test_groups.append(new_group)
+
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(x, len(group_refs))
+
+ self.identity_api.add_user_to_group(
+ positive_user['id'],
+ new_group['id'])
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(x + 1, len(group_refs))
+
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+ self.config_fixture.config(group='ldap', group_filter='(dn=xx)')
+ self.reload_backends(CONF.identity.default_domain_id)
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(0, len(group_refs))
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+ self.config_fixture.config(group='ldap',
+ group_filter='(objectclass=*)')
+ self.reload_backends(CONF.identity.default_domain_id)
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(GROUP_COUNT, len(group_refs))
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+ def test_user_enable_attribute_mask(self):
+ self.config_fixture.config(
+ group='ldap',
+ user_enabled_emulation=False,
+ user_enabled_attribute='employeeType')
+ super(LiveLDAPIdentity, self).test_user_enable_attribute_mask()
+
+ def test_create_project_case_sensitivity(self):
+ # The attribute used for the live LDAP tests is case insensitive.
+
+ def call_super():
+ (super(LiveLDAPIdentity, self).
+ test_create_project_case_sensitivity())
+
+ self.assertRaises(exception.Conflict, call_super)
+
+ def test_create_user_case_sensitivity(self):
+ # The attribute used for the live LDAP tests is case insensitive.
+
+ def call_super():
+ super(LiveLDAPIdentity, self).test_create_user_case_sensitivity()
+
+ self.assertRaises(exception.Conflict, call_super)
+
+ def test_project_update_missing_attrs_with_a_falsey_value(self):
+ # The description attribute doesn't allow an empty value.
+
+ def call_super():
+ (super(LiveLDAPIdentity, self).
+ test_project_update_missing_attrs_with_a_falsey_value())
+
+ self.assertRaises(ldap.INVALID_SYNTAX, call_super)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
new file mode 100644
index 00000000..02fa8145
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
@@ -0,0 +1,208 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import ldappool
+from oslo_config import cfg
+
+from keystone.common.ldap import core as ldap_core
+from keystone.identity.backends import ldap
+from keystone.tests import unit as tests
+from keystone.tests.unit import fakeldap
+from keystone.tests.unit import test_backend_ldap_pool
+from keystone.tests.unit import test_ldap_livetest
+
+
+CONF = cfg.CONF
+
+
+class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin,
+ test_ldap_livetest.LiveLDAPIdentity):
+ """Executes existing LDAP live test with pooled LDAP handler to make
+ sure it works without any error.
+
+ Also executes common pool specific tests via Mixin class.
+ """
+
+ def setUp(self):
+ super(LiveLDAPPoolIdentity, self).setUp()
+ self.addCleanup(self.cleanup_pools)
+ # storing to local variable to avoid long references
+ self.conn_pools = ldap_core.PooledLDAPHandler.connection_pools
+
+ def config_files(self):
+ config_files = super(LiveLDAPPoolIdentity, self).config_files()
+ config_files.append(tests.dirs.
+ tests_conf('backend_pool_liveldap.conf'))
+ return config_files
+
+ def config_overrides(self):
+ super(LiveLDAPPoolIdentity, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def test_assert_connector_used_not_fake_ldap_pool(self):
+ handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True)
+ self.assertNotEqual(type(handler.Connector),
+ type(fakeldap.FakeLdapPool))
+ self.assertEqual(type(ldappool.StateConnector),
+ type(handler.Connector))
+
+ def test_async_search_and_result3(self):
+ self.config_fixture.config(group='ldap', page_size=1)
+ self.test_user_enable_attribute_mask()
+
+ def test_pool_size_expands_correctly(self):
+
+ who = CONF.ldap.user
+ cred = CONF.ldap.password
+ # get related connection manager instance
+ ldappool_cm = self.conn_pools[CONF.ldap.url]
+
+ def _get_conn():
+ return ldappool_cm.connection(who, cred)
+
+        with _get_conn() as c1:  # conn1
+            self.assertEqual(1, len(ldappool_cm))
+            self.assertTrue(c1.connected)
+            self.assertTrue(c1.active)
+ with _get_conn() as c2: # conn2
+ self.assertEqual(2, len(ldappool_cm))
+ self.assertTrue(c2.connected)
+ self.assertTrue(c2.active)
+
+ self.assertEqual(2, len(ldappool_cm))
+            # c2 went out of context; it's connected but not active
+ self.assertTrue(c2.connected)
+ self.assertFalse(c2.active)
+ with _get_conn() as c3: # conn3
+ self.assertEqual(2, len(ldappool_cm))
+ self.assertTrue(c3.connected)
+ self.assertTrue(c3.active)
+                self.assertIs(c2, c3)  # same connection is reused
+ self.assertTrue(c2.active)
+ with _get_conn() as c4: # conn4
+ self.assertEqual(3, len(ldappool_cm))
+ self.assertTrue(c4.connected)
+ self.assertTrue(c4.active)
+
+ def test_password_change_with_auth_pool_disabled(self):
+ self.config_fixture.config(group='ldap', use_auth_pool=False)
+ old_password = self.user_sna['password']
+
+ self.test_password_change_with_pool()
+
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=self.user_sna['id'],
+ password=old_password)
+
+ def _create_user_and_authenticate(self, password):
+ user_dict = {
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex,
+ 'password': password}
+ user = self.identity_api.create_user(user_dict)
+
+ self.identity_api.authenticate(
+ context={},
+ user_id=user['id'],
+ password=password)
+
+ return self.identity_api.get_user(user['id'])
+
+ def _get_auth_conn_pool_cm(self):
+ pool_url = ldap_core.PooledLDAPHandler.auth_pool_prefix + CONF.ldap.url
+ return self.conn_pools[pool_url]
+
+ def _do_password_change_for_one_user(self, password, new_password):
+ self.config_fixture.config(group='ldap', use_auth_pool=True)
+ self.cleanup_pools()
+ self.load_backends()
+
+ user1 = self._create_user_and_authenticate(password)
+ auth_cm = self._get_auth_conn_pool_cm()
+ self.assertEqual(1, len(auth_cm))
+ user2 = self._create_user_and_authenticate(password)
+ self.assertEqual(1, len(auth_cm))
+ user3 = self._create_user_and_authenticate(password)
+ self.assertEqual(1, len(auth_cm))
+ user4 = self._create_user_and_authenticate(password)
+ self.assertEqual(1, len(auth_cm))
+ user5 = self._create_user_and_authenticate(password)
+ self.assertEqual(1, len(auth_cm))
+
+        # The connection pool size remains 1 even for different user LDAP
+        # binds, as there is only one active connection at a time.
+
+ user_api = ldap.UserApi(CONF)
+ u1_dn = user_api._id_to_dn_string(user1['id'])
+ u2_dn = user_api._id_to_dn_string(user2['id'])
+ u3_dn = user_api._id_to_dn_string(user3['id'])
+ u4_dn = user_api._id_to_dn_string(user4['id'])
+ u5_dn = user_api._id_to_dn_string(user5['id'])
+
+        # Now create multiple active connections for the end-user auth case,
+        # which forces them to be kept in the pool. After that, modify one
+        # user's password, making sure that user's connection is in the
+        # middle of the pool list.
+ auth_cm = self._get_auth_conn_pool_cm()
+ with auth_cm.connection(u1_dn, password) as _:
+ with auth_cm.connection(u2_dn, password) as _:
+ with auth_cm.connection(u3_dn, password) as _:
+ with auth_cm.connection(u4_dn, password) as _:
+ with auth_cm.connection(u5_dn, password) as _:
+ self.assertEqual(5, len(auth_cm))
+ _.unbind_s()
+
+ user3['password'] = new_password
+ self.identity_api.update_user(user3['id'], user3)
+
+ return user3
+
+ def test_password_change_with_auth_pool_enabled_long_lifetime(self):
+ self.config_fixture.config(group='ldap',
+ auth_pool_connection_lifetime=600)
+ old_password = 'my_password'
+ new_password = 'new_password'
+ user = self._do_password_change_for_one_user(old_password,
+ new_password)
+ user.pop('password')
+
+        # With a long connection lifetime, the auth pool can still bind with
+        # the old password, which is not desired if frequent password changes
+        # are a use case in a deployment.
+        # This can only happen when there are multiple concurrent connections.
+ user_ref = self.identity_api.authenticate(
+ context={}, user_id=user['id'], password=old_password)
+
+ self.assertDictEqual(user_ref, user)
+
+ def test_password_change_with_auth_pool_enabled_no_lifetime(self):
+ self.config_fixture.config(group='ldap',
+ auth_pool_connection_lifetime=0)
+
+ old_password = 'my_password'
+ new_password = 'new_password'
+ user = self._do_password_change_for_one_user(old_password,
+ new_password)
+        # Now that the connection lifetime is zero, authentication
+        # with the old password will always fail.
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={}, user_id=user['id'],
+ password=old_password)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
new file mode 100644
index 00000000..d79c2bad
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
@@ -0,0 +1,122 @@
+# Copyright 2013 OpenStack Foundation
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ldap
+import ldap.modlist
+from oslo_config import cfg
+
+from keystone import exception
+from keystone import identity
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_ldap_livetest
+
+
+CONF = cfg.CONF
+
+
+def create_object(dn, attrs):
+ conn = ldap.initialize(CONF.ldap.url)
+ conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password)
+ ldif = ldap.modlist.addModlist(attrs)
+ conn.add_s(dn, ldif)
+ conn.unbind_s()
+
+
+class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
+
+ def _ldap_skip_live(self):
+ self.skip_if_env_not_set('ENABLE_TLS_LDAP_LIVE_TEST')
+
+ def config_files(self):
+ config_files = super(LiveTLSLDAPIdentity, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_tls_liveldap.conf'))
+ return config_files
+
+ def config_overrides(self):
+ super(LiveTLSLDAPIdentity, self).config_overrides()
+ self.config_fixture.config(
+ group='identity',
+ driver='keystone.identity.backends.ldap.Identity')
+
+ def test_tls_certfile_demand_option(self):
+ self.config_fixture.config(group='ldap',
+ use_tls=True,
+ tls_cacertdir=None,
+ tls_req_cert='demand')
+ self.identity_api = identity.backends.ldap.Identity()
+
+        user = {'id': 'fake1',
+                'name': 'fake1',
+                'password': 'fakepass1',
+                'tenants': ['bar']}
+        user = self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['id'], user_ref['id'])
+
+ user['password'] = 'fakepass2'
+ self.identity_api.update_user(user['id'], user)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound, self.identity_api.get_user,
+ user['id'])
+
+ def test_tls_certdir_demand_option(self):
+ self.config_fixture.config(group='ldap',
+ use_tls=True,
+ tls_cacertdir=None,
+ tls_req_cert='demand')
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual('fake1', user_ref['id'])
+
+ user['password'] = 'fakepass2'
+ self.identity_api.update_user('fake1', user)
+
+ self.identity_api.delete_user('fake1')
+ self.assertRaises(exception.UserNotFound, self.identity_api.get_user,
+ 'fake1')
+
+ def test_tls_bad_certfile(self):
+ self.config_fixture.config(
+ group='ldap',
+ use_tls=True,
+ tls_req_cert='demand',
+ tls_cacertfile='/etc/keystone/ssl/certs/mythicalcert.pem',
+ tls_cacertdir=None)
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.assertRaises(IOError, self.identity_api.create_user, user)
+
+ def test_tls_bad_certdir(self):
+ self.config_fixture.config(
+ group='ldap',
+ use_tls=True,
+ tls_cacertfile=None,
+ tls_req_cert='demand',
+ tls_cacertdir='/etc/keystone/ssl/mythicalcertdir')
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.assertRaises(IOError, self.identity_api.create_user, user)
diff --git a/keystone-moon/keystone/tests/unit/test_middleware.py b/keystone-moon/keystone/tests/unit/test_middleware.py
new file mode 100644
index 00000000..3a26dd24
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_middleware.py
@@ -0,0 +1,119 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+import webob
+
+from keystone import middleware
+from keystone.tests import unit as tests
+
+
+CONF = cfg.CONF
+
+
+def make_request(**kwargs):
+ accept = kwargs.pop('accept', None)
+ method = kwargs.pop('method', 'GET')
+ body = kwargs.pop('body', None)
+ req = webob.Request.blank('/', **kwargs)
+ req.method = method
+ if body is not None:
+ req.body = body
+ if accept is not None:
+ req.accept = accept
+ return req
+
+
+def make_response(**kwargs):
+ body = kwargs.pop('body', None)
+ return webob.Response(body)
+
+
+class TokenAuthMiddlewareTest(tests.TestCase):
+ def test_request(self):
+ req = make_request()
+ req.headers[middleware.AUTH_TOKEN_HEADER] = 'MAGIC'
+ middleware.TokenAuthMiddleware(None).process_request(req)
+ context = req.environ[middleware.CONTEXT_ENV]
+ self.assertEqual('MAGIC', context['token_id'])
+
+
+class AdminTokenAuthMiddlewareTest(tests.TestCase):
+ def test_request_admin(self):
+ req = make_request()
+ req.headers[middleware.AUTH_TOKEN_HEADER] = CONF.admin_token
+ middleware.AdminTokenAuthMiddleware(None).process_request(req)
+ context = req.environ[middleware.CONTEXT_ENV]
+ self.assertTrue(context['is_admin'])
+
+ def test_request_non_admin(self):
+ req = make_request()
+ req.headers[middleware.AUTH_TOKEN_HEADER] = 'NOT-ADMIN'
+ middleware.AdminTokenAuthMiddleware(None).process_request(req)
+ context = req.environ[middleware.CONTEXT_ENV]
+ self.assertFalse(context['is_admin'])
+
+
+class PostParamsMiddlewareTest(tests.TestCase):
+ def test_request_with_params(self):
+ req = make_request(body="arg1=one", method='POST')
+ middleware.PostParamsMiddleware(None).process_request(req)
+ params = req.environ[middleware.PARAMS_ENV]
+ self.assertEqual({"arg1": "one"}, params)
+
+
+class JsonBodyMiddlewareTest(tests.TestCase):
+ def test_request_with_params(self):
+ req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
+ content_type='application/json',
+ method='POST')
+ middleware.JsonBodyMiddleware(None).process_request(req)
+ params = req.environ[middleware.PARAMS_ENV]
+ self.assertEqual({"arg1": "one", "arg2": ["a"]}, params)
+
+ def test_malformed_json(self):
+ req = make_request(body='{"arg1": "on',
+ content_type='application/json',
+ method='POST')
+ resp = middleware.JsonBodyMiddleware(None).process_request(req)
+ self.assertEqual(400, resp.status_int)
+
+ def test_not_dict_body(self):
+ req = make_request(body='42',
+ content_type='application/json',
+ method='POST')
+ resp = middleware.JsonBodyMiddleware(None).process_request(req)
+ self.assertEqual(400, resp.status_int)
+ self.assertTrue('valid JSON object' in resp.json['error']['message'])
+
+ def test_no_content_type(self):
+ req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
+ method='POST')
+ middleware.JsonBodyMiddleware(None).process_request(req)
+ params = req.environ[middleware.PARAMS_ENV]
+ self.assertEqual({"arg1": "one", "arg2": ["a"]}, params)
+
+ def test_unrecognized_content_type(self):
+ req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
+ content_type='text/plain',
+ method='POST')
+ resp = middleware.JsonBodyMiddleware(None).process_request(req)
+ self.assertEqual(400, resp.status_int)
+
+ def test_unrecognized_content_type_without_body(self):
+ req = make_request(content_type='text/plain',
+ method='GET')
+ middleware.JsonBodyMiddleware(None).process_request(req)
+ params = req.environ.get(middleware.PARAMS_ENV, {})
+ self.assertEqual({}, params)
diff --git a/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py b/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py
new file mode 100644
index 00000000..9f67fbd7
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py
@@ -0,0 +1,59 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import webtest
+
+from keystone.tests import unit as tests
+
+
+class TestNoAdminTokenAuth(tests.TestCase):
+ def setUp(self):
+ super(TestNoAdminTokenAuth, self).setUp()
+ self.load_backends()
+
+ self._generate_paste_config()
+
+ self.admin_app = webtest.TestApp(
+ self.loadapp(tests.dirs.tmp('no_admin_token_auth'), name='admin'),
+ extra_environ=dict(REMOTE_ADDR='127.0.0.1'))
+ self.addCleanup(setattr, self, 'admin_app', None)
+
+ def _generate_paste_config(self):
+ # Generate a file, based on keystone-paste.ini, that doesn't include
+ # admin_token_auth in the pipeline
+
+ with open(tests.dirs.etc('keystone-paste.ini'), 'r') as f:
+ contents = f.read()
+
+ new_contents = contents.replace(' admin_token_auth ', ' ')
+
+ filename = tests.dirs.tmp('no_admin_token_auth-paste.ini')
+ with open(filename, 'w') as f:
+ f.write(new_contents)
+ self.addCleanup(os.remove, filename)
+
+ def test_request_no_admin_token_auth(self):
+ # This test verifies that if the admin_token_auth middleware isn't
+ # in the paste pipeline that users can still make requests.
+
+        # Note(blk-u): Picked /v2.0/tenants because it's an operation that
+        # requires is_admin in the context; any operation that requires
+        # is_admin would work for this test.
+ REQ_PATH = '/v2.0/tenants'
+
+ # If the following does not raise, then the test is successful.
+ self.admin_app.get(REQ_PATH, headers={'X-Auth-Token': 'NotAdminToken'},
+ status=401)
diff --git a/keystone-moon/keystone/tests/unit/test_policy.py b/keystone-moon/keystone/tests/unit/test_policy.py
new file mode 100644
index 00000000..2c0c3995
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_policy.py
@@ -0,0 +1,228 @@
+# Copyright 2011 Piston Cloud Computing, Inc.
+# All Rights Reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+
+import mock
+from oslo_policy import policy as common_policy
+import six
+from six.moves.urllib import request as urlrequest
+from testtools import matchers
+
+from keystone import exception
+from keystone.policy.backends import rules
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import temporaryfile
+
+
+class BasePolicyTestCase(tests.TestCase):
+ def setUp(self):
+ super(BasePolicyTestCase, self).setUp()
+ rules.reset()
+ self.addCleanup(rules.reset)
+ self.addCleanup(self.clear_cache_safely)
+
+ def clear_cache_safely(self):
+ if rules._ENFORCER:
+ rules._ENFORCER.clear()
+
+
+class PolicyFileTestCase(BasePolicyTestCase):
+ def setUp(self):
+        # self.tmpfilename should exist before setUp super is called.
+        # This is to ensure it is available for the config_fixture in
+        # the config_overrides call.
+ self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
+ self.tmpfilename = self.tempfile.file_name
+ super(PolicyFileTestCase, self).setUp()
+ self.target = {}
+
+ def config_overrides(self):
+ super(PolicyFileTestCase, self).config_overrides()
+ self.config_fixture.config(group='oslo_policy',
+ policy_file=self.tmpfilename)
+
+ def test_modified_policy_reloads(self):
+ action = "example:test"
+ empty_credentials = {}
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write("""{"example:test": []}""")
+ rules.enforce(empty_credentials, action, self.target)
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write("""{"example:test": ["false:false"]}""")
+ rules._ENFORCER.clear()
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ empty_credentials, action, self.target)
+
+ def test_invalid_policy_raises_error(self):
+ action = "example:test"
+ empty_credentials = {}
+ invalid_json = '{"example:test": [],}'
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write(invalid_json)
+ self.assertRaises(ValueError, rules.enforce,
+ empty_credentials, action, self.target)
+
+
+class PolicyTestCase(BasePolicyTestCase):
+ def setUp(self):
+ super(PolicyTestCase, self).setUp()
+ # NOTE(vish): preload rules to circumvent reloading from file
+ rules.init()
+ self.rules = {
+ "true": [],
+ "example:allowed": [],
+ "example:denied": [["false:false"]],
+ "example:get_http": [["http:http://www.example.com"]],
+ "example:my_file": [["role:compute_admin"],
+ ["project_id:%(project_id)s"]],
+ "example:early_and_fail": [["false:false", "rule:true"]],
+ "example:early_or_success": [["rule:true"], ["false:false"]],
+ "example:lowercase_admin": [["role:admin"], ["role:sysadmin"]],
+ "example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]],
+ }
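+
+        # Illustration (an assumption about oslo.policy's legacy
+        # list-of-lists syntax, not something this test asserts): the
+        # outer list is OR-ed and each inner list is AND-ed. For example,
+        # "example:my_file" should pass for the hypothetical values
+        #     credentials = {'roles': [], 'project_id': 'p1'}
+        #     target = {'project_id': 'p1'}
+        # via its second alternative, project_id:%(project_id)s.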
+
+ # NOTE(vish): then overload underlying policy engine
+ self._set_rules()
+ self.credentials = {}
+ self.target = {}
+
+ def _set_rules(self):
+ these_rules = common_policy.Rules.from_dict(self.rules)
+ rules._ENFORCER.set_rules(these_rules)
+
+ def test_enforce_nonexistent_action_throws(self):
+ action = "example:noexist"
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, self.target)
+
+ def test_enforce_bad_action_throws(self):
+ action = "example:denied"
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, self.target)
+
+ def test_enforce_good_action(self):
+ action = "example:allowed"
+ rules.enforce(self.credentials, action, self.target)
+
+ def test_enforce_http_true(self):
+
+ def fakeurlopen(url, post_data):
+ return six.StringIO("True")
+
+ action = "example:get_http"
+ target = {}
+ with mock.patch.object(urlrequest, 'urlopen', fakeurlopen):
+ result = rules.enforce(self.credentials, action, target)
+ self.assertTrue(result)
+
+ def test_enforce_http_false(self):
+
+ def fakeurlopen(url, post_data):
+ return six.StringIO("False")
+
+ action = "example:get_http"
+ target = {}
+ with mock.patch.object(urlrequest, 'urlopen', fakeurlopen):
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, target)
+
+ def test_templatized_enforcement(self):
+ target_mine = {'project_id': 'fake'}
+ target_not_mine = {'project_id': 'another'}
+ credentials = {'project_id': 'fake', 'roles': []}
+ action = "example:my_file"
+ rules.enforce(credentials, action, target_mine)
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ credentials, action, target_not_mine)
+
+ def test_early_AND_enforcement(self):
+ action = "example:early_and_fail"
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, self.target)
+
+ def test_early_OR_enforcement(self):
+ action = "example:early_or_success"
+ rules.enforce(self.credentials, action, self.target)
+
+ def test_ignore_case_role_check(self):
+ lowercase_action = "example:lowercase_admin"
+ uppercase_action = "example:uppercase_admin"
+ # NOTE(dprince) we mix case in the Admin role here to ensure
+ # case is ignored
+ admin_credentials = {'roles': ['AdMiN']}
+ rules.enforce(admin_credentials, lowercase_action, self.target)
+ rules.enforce(admin_credentials, uppercase_action, self.target)
+
+
+class DefaultPolicyTestCase(BasePolicyTestCase):
+ def setUp(self):
+ super(DefaultPolicyTestCase, self).setUp()
+ rules.init()
+
+ self.rules = {
+ "default": [],
+ "example:exist": [["false:false"]]
+ }
+ self._set_rules('default')
+ self.credentials = {}
+
+ # FIXME(gyee): latest Oslo policy Enforcer class reloads the rules in
+ # its enforce() method even though rules has been initialized via
+ # set_rules(). To make it easier to do our tests, we're going to
+ # monkeypatch load_roles() so it does nothing. This seem like a bug in
+ # Oslo policy as we shoudn't have to reload the rules if they have
+ # already been set using set_rules().
+ self._old_load_rules = rules._ENFORCER.load_rules
+ self.addCleanup(setattr, rules._ENFORCER, 'load_rules',
+ self._old_load_rules)
+ rules._ENFORCER.load_rules = lambda *args, **kwargs: None
+
+ def _set_rules(self, default_rule):
+ these_rules = common_policy.Rules.from_dict(self.rules, default_rule)
+ rules._ENFORCER.set_rules(these_rules)
+
+ def test_policy_called(self):
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, "example:exist", {})
+
+ def test_not_found_policy_calls_default(self):
+ rules.enforce(self.credentials, "example:noexist", {})
+
+ def test_default_not_found(self):
+ new_default_rule = "default_noexist"
+ # FIXME(gyee): need to overwrite the Enforcer's default_rule first
+ # as it is recreating the rules with its own default_rule instead
+ # of the default_rule passed in from set_rules(). I think this is a
+ # bug in Oslo policy.
+ rules._ENFORCER.default_rule = new_default_rule
+ self._set_rules(new_default_rule)
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, "example:noexist", {})
+
+
+class PolicyJsonTestCase(tests.TestCase):
+
+ def _load_entries(self, filename):
+        # Use a context manager so the file is closed promptly.
+        with open(filename) as f:
+            return set(json.load(f))
+
+ def test_json_examples_have_matching_entries(self):
+ policy_keys = self._load_entries(tests.dirs.etc('policy.json'))
+ cloud_policy_keys = self._load_entries(
+ tests.dirs.etc('policy.v3cloudsample.json'))
+
+ diffs = set(policy_keys).difference(set(cloud_policy_keys))
+
+ self.assertThat(diffs, matchers.Equals(set()))
diff --git a/keystone-moon/keystone/tests/unit/test_revoke.py b/keystone-moon/keystone/tests/unit/test_revoke.py
new file mode 100644
index 00000000..727eff78
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_revoke.py
@@ -0,0 +1,637 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import datetime
+import uuid
+
+import mock
+from oslo_utils import timeutils
+from testtools import matchers
+
+from keystone.contrib.revoke import model
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_backend_sql
+from keystone.token import provider
+
+
+def _new_id():
+ return uuid.uuid4().hex
+
+
+def _future_time():
+ expire_delta = datetime.timedelta(seconds=1000)
+ future_time = timeutils.utcnow() + expire_delta
+ return future_time
+
+
+def _past_time():
+ expire_delta = datetime.timedelta(days=-1000)
+ past_time = timeutils.utcnow() + expire_delta
+ return past_time
+
+
+def _sample_blank_token():
+ issued_delta = datetime.timedelta(minutes=-2)
+ issued_at = timeutils.utcnow() + issued_delta
+ token_data = model.blank_token_data(issued_at)
+ return token_data
+
+
+def _matches(event, token_values):
+ """See if the token matches the revocation event.
+
+ Used as a secondary check on the logic to Check
+ By Tree Below: This is abrute force approach to checking.
+ Compare each attribute from the event with the corresponding
+ value from the token. If the event does not have a value for
+ the attribute, a match is still possible. If the event has a
+ value for the attribute, and it does not match the token, no match
+ is possible, so skip the remaining checks.
+
+    :param event: one revocation event to match
+    :param token_values: dictionary with the set of values taken from
+        the token
+    :returns: True if the token matches the revocation event, indicating
+        that the token has been revoked
+ """
+
+ # The token has three attributes that can match the user_id
+ if event.user_id is not None:
+ for attribute_name in ['user_id', 'trustor_id', 'trustee_id']:
+ if event.user_id == token_values[attribute_name]:
+ break
+ else:
+ return False
+
+ # The token has two attributes that can match the domain_id
+ if event.domain_id is not None:
+ for attribute_name in ['identity_domain_id', 'assignment_domain_id']:
+ if event.domain_id == token_values[attribute_name]:
+ break
+ else:
+ return False
+
+ if event.domain_scope_id is not None:
+ if event.domain_scope_id != token_values['assignment_domain_id']:
+ return False
+
+    # If any one check does not match, the whole token does
+    # not match the event. Each early return False indicates
+    # that the token is still valid and short-circuits the
+    # rest of the logic.
+ attribute_names = ['project_id',
+ 'expires_at', 'trust_id', 'consumer_id',
+ 'access_token_id', 'audit_id', 'audit_chain_id']
+ for attribute_name in attribute_names:
+ if getattr(event, attribute_name) is not None:
+ if (getattr(event, attribute_name) !=
+ token_values[attribute_name]):
+ return False
+
+ if event.role_id is not None:
+ roles = token_values['roles']
+ for role in roles:
+ if event.role_id == role:
+ break
+ else:
+ return False
+ if token_values['issued_at'] > event.issued_before:
+ return False
+ return True
+
+
+class RevokeTests(object):
+ def test_list(self):
+ self.revoke_api.revoke_by_user(user_id=1)
+ self.assertEqual(1, len(self.revoke_api.list_events()))
+
+ self.revoke_api.revoke_by_user(user_id=2)
+ self.assertEqual(2, len(self.revoke_api.list_events()))
+
+ def test_list_since(self):
+ self.revoke_api.revoke_by_user(user_id=1)
+ self.revoke_api.revoke_by_user(user_id=2)
+ past = timeutils.utcnow() - datetime.timedelta(seconds=1000)
+ self.assertEqual(2, len(self.revoke_api.list_events(past)))
+ future = timeutils.utcnow() + datetime.timedelta(seconds=1000)
+ self.assertEqual(0, len(self.revoke_api.list_events(future)))
+
+ def test_past_expiry_are_removed(self):
+ user_id = 1
+ self.revoke_api.revoke_by_expiration(user_id, _future_time())
+ self.assertEqual(1, len(self.revoke_api.list_events()))
+ event = model.RevokeEvent()
+ event.revoked_at = _past_time()
+ self.revoke_api.revoke(event)
+ self.assertEqual(1, len(self.revoke_api.list_events()))
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_expired_events_removed_validate_token_success(self, mock_utcnow):
+ def _sample_token_values():
+ token = _sample_blank_token()
+ token['expires_at'] = timeutils.isotime(_future_time(),
+ subsecond=True)
+ return token
+
+ now = datetime.datetime.utcnow()
+ now_plus_2h = now + datetime.timedelta(hours=2)
+ mock_utcnow.return_value = now
+
+ # Build a token and validate it. This will seed the cache for the
+ # future 'synchronize' call.
+ token_values = _sample_token_values()
+
+ user_id = _new_id()
+ self.revoke_api.revoke_by_user(user_id)
+ token_values['user_id'] = user_id
+ self.assertRaises(exception.TokenNotFound,
+ self.revoke_api.check_token,
+ token_values)
+
+ # Move our clock forward by 2h, build a new token and validate it.
+ # 'synchronize' should now be exercised and remove old expired events
+ mock_utcnow.return_value = now_plus_2h
+ self.revoke_api.revoke_by_expiration(_new_id(), now_plus_2h)
+ # should no longer throw an exception
+ self.revoke_api.check_token(token_values)
+
+ def test_revoke_by_expiration_project_and_domain_fails(self):
+ user_id = _new_id()
+ expires_at = timeutils.isotime(_future_time(), subsecond=True)
+ domain_id = _new_id()
+ project_id = _new_id()
+ self.assertThat(
+ lambda: self.revoke_api.revoke_by_expiration(
+ user_id, expires_at, domain_id=domain_id,
+ project_id=project_id),
+ matchers.raises(exception.UnexpectedError))
+
+
+class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests):
+ def config_overrides(self):
+ super(SqlRevokeTests, self).config_overrides()
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.sql.Revoke')
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider',
+ revoke_by_id=False)
+
+
+class KvsRevokeTests(tests.TestCase, RevokeTests):
+ def config_overrides(self):
+ super(KvsRevokeTests, self).config_overrides()
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider',
+ revoke_by_id=False)
+
+ def setUp(self):
+ super(KvsRevokeTests, self).setUp()
+ self.load_backends()
+
+
+class RevokeTreeTests(tests.TestCase):
+ def setUp(self):
+ super(RevokeTreeTests, self).setUp()
+ self.events = []
+ self.tree = model.RevokeTree()
+ self._sample_data()
+
+ def _sample_data(self):
+ user_ids = []
+ project_ids = []
+ role_ids = []
+ for i in range(0, 3):
+ user_ids.append(_new_id())
+ project_ids.append(_new_id())
+ role_ids.append(_new_id())
+
+ project_tokens = []
+ i = len(project_tokens)
+ project_tokens.append(_sample_blank_token())
+ project_tokens[i]['user_id'] = user_ids[0]
+ project_tokens[i]['project_id'] = project_ids[0]
+ project_tokens[i]['roles'] = [role_ids[1]]
+
+ i = len(project_tokens)
+ project_tokens.append(_sample_blank_token())
+ project_tokens[i]['user_id'] = user_ids[1]
+ project_tokens[i]['project_id'] = project_ids[0]
+ project_tokens[i]['roles'] = [role_ids[0]]
+
+ i = len(project_tokens)
+ project_tokens.append(_sample_blank_token())
+ project_tokens[i]['user_id'] = user_ids[0]
+ project_tokens[i]['project_id'] = project_ids[1]
+ project_tokens[i]['roles'] = [role_ids[0]]
+
+ token_to_revoke = _sample_blank_token()
+ token_to_revoke['user_id'] = user_ids[0]
+ token_to_revoke['project_id'] = project_ids[0]
+ token_to_revoke['roles'] = [role_ids[0]]
+
+ self.project_tokens = project_tokens
+ self.user_ids = user_ids
+ self.project_ids = project_ids
+ self.role_ids = role_ids
+ self.token_to_revoke = token_to_revoke
+
+ def _assertTokenRevoked(self, token_data):
+ self.assertTrue(any([_matches(e, token_data) for e in self.events]))
+ return self.assertTrue(self.tree.is_revoked(token_data),
+ 'Token should be revoked')
+
+ def _assertTokenNotRevoked(self, token_data):
+ self.assertFalse(any([_matches(e, token_data) for e in self.events]))
+ return self.assertFalse(self.tree.is_revoked(token_data),
+ 'Token should not be revoked')
+
+ def _revoke_by_user(self, user_id):
+ return self.tree.add_event(
+ model.RevokeEvent(user_id=user_id))
+
+ def _revoke_by_audit_id(self, audit_id):
+ event = self.tree.add_event(
+ model.RevokeEvent(audit_id=audit_id))
+ self.events.append(event)
+ return event
+
+ def _revoke_by_audit_chain_id(self, audit_chain_id, project_id=None,
+ domain_id=None):
+ event = self.tree.add_event(
+ model.RevokeEvent(audit_chain_id=audit_chain_id,
+ project_id=project_id,
+ domain_id=domain_id)
+ )
+ self.events.append(event)
+ return event
+
+ def _revoke_by_expiration(self, user_id, expires_at, project_id=None,
+ domain_id=None):
+ event = self.tree.add_event(
+ model.RevokeEvent(user_id=user_id,
+ expires_at=expires_at,
+ project_id=project_id,
+ domain_id=domain_id))
+ self.events.append(event)
+ return event
+
+ def _revoke_by_grant(self, role_id, user_id=None,
+ domain_id=None, project_id=None):
+ event = self.tree.add_event(
+ model.RevokeEvent(user_id=user_id,
+ role_id=role_id,
+ domain_id=domain_id,
+ project_id=project_id))
+ self.events.append(event)
+ return event
+
+ def _revoke_by_user_and_project(self, user_id, project_id):
+ event = self.tree.add_event(
+ model.RevokeEvent(project_id=project_id,
+ user_id=user_id))
+ self.events.append(event)
+ return event
+
+ def _revoke_by_project_role_assignment(self, project_id, role_id):
+ event = self.tree.add_event(
+ model.RevokeEvent(project_id=project_id,
+ role_id=role_id))
+ self.events.append(event)
+ return event
+
+ def _revoke_by_domain_role_assignment(self, domain_id, role_id):
+ event = self.tree.add_event(
+ model.RevokeEvent(domain_id=domain_id,
+ role_id=role_id))
+ self.events.append(event)
+ return event
+
+ def _revoke_by_domain(self, domain_id):
+ event = self.tree.add_event(model.RevokeEvent(domain_id=domain_id))
+ self.events.append(event)
+
+ def _user_field_test(self, field_name):
+ user_id = _new_id()
+ event = self._revoke_by_user(user_id)
+ self.events.append(event)
+ token_data_u1 = _sample_blank_token()
+ token_data_u1[field_name] = user_id
+ self._assertTokenRevoked(token_data_u1)
+ token_data_u2 = _sample_blank_token()
+ token_data_u2[field_name] = _new_id()
+ self._assertTokenNotRevoked(token_data_u2)
+ self.tree.remove_event(event)
+ self.events.remove(event)
+ self._assertTokenNotRevoked(token_data_u1)
+
+ def test_revoke_by_user(self):
+ self._user_field_test('user_id')
+
+ def test_revoke_by_user_matches_trustee(self):
+ self._user_field_test('trustee_id')
+
+ def test_revoke_by_user_matches_trustor(self):
+ self._user_field_test('trustor_id')
+
+ def test_by_user_expiration(self):
+ future_time = _future_time()
+
+ user_id = 1
+ event = self._revoke_by_expiration(user_id, future_time)
+ token_data_1 = _sample_blank_token()
+ token_data_1['user_id'] = user_id
+ token_data_1['expires_at'] = future_time.replace(microsecond=0)
+ self._assertTokenRevoked(token_data_1)
+
+ token_data_2 = _sample_blank_token()
+ token_data_2['user_id'] = user_id
+ expire_delta = datetime.timedelta(seconds=2000)
+ future_time = timeutils.utcnow() + expire_delta
+ token_data_2['expires_at'] = future_time
+ self._assertTokenNotRevoked(token_data_2)
+
+ self.remove_event(event)
+ self._assertTokenNotRevoked(token_data_1)
+
+ def test_revoke_by_audit_id(self):
+ audit_id = provider.audit_info(parent_audit_id=None)[0]
+ token_data_1 = _sample_blank_token()
+ # Audit ID and Audit Chain ID are populated with the same value
+ # if the token is an original token
+ token_data_1['audit_id'] = audit_id
+ token_data_1['audit_chain_id'] = audit_id
+ event = self._revoke_by_audit_id(audit_id)
+ self._assertTokenRevoked(token_data_1)
+
+ audit_id_2 = provider.audit_info(parent_audit_id=audit_id)[0]
+ token_data_2 = _sample_blank_token()
+ token_data_2['audit_id'] = audit_id_2
+ token_data_2['audit_chain_id'] = audit_id
+ self._assertTokenNotRevoked(token_data_2)
+
+ self.remove_event(event)
+ self._assertTokenNotRevoked(token_data_1)
+
+ def test_revoke_by_audit_chain_id(self):
+ audit_id = provider.audit_info(parent_audit_id=None)[0]
+ token_data_1 = _sample_blank_token()
+ # Audit ID and Audit Chain ID are populated with the same value
+ # if the token is an original token
+ token_data_1['audit_id'] = audit_id
+ token_data_1['audit_chain_id'] = audit_id
+ event = self._revoke_by_audit_chain_id(audit_id)
+ self._assertTokenRevoked(token_data_1)
+
+ audit_id_2 = provider.audit_info(parent_audit_id=audit_id)[0]
+ token_data_2 = _sample_blank_token()
+ token_data_2['audit_id'] = audit_id_2
+ token_data_2['audit_chain_id'] = audit_id
+ self._assertTokenRevoked(token_data_2)
+
+ self.remove_event(event)
+ self._assertTokenNotRevoked(token_data_1)
+ self._assertTokenNotRevoked(token_data_2)
+
+ def test_by_user_project(self):
+ # When a user has a project-scoped token and the project-scoped token
+ # is revoked then the token is revoked.
+
+ user_id = _new_id()
+ project_id = _new_id()
+
+ future_time = _future_time()
+
+ token_data = _sample_blank_token()
+ token_data['user_id'] = user_id
+ token_data['project_id'] = project_id
+ token_data['expires_at'] = future_time.replace(microsecond=0)
+
+ self._revoke_by_expiration(user_id, future_time, project_id=project_id)
+ self._assertTokenRevoked(token_data)
+
+ def test_by_user_domain(self):
+ # When a user has a domain-scoped token and the domain-scoped token
+ # is revoked then the token is revoked.
+
+ user_id = _new_id()
+ domain_id = _new_id()
+
+ future_time = _future_time()
+
+ token_data = _sample_blank_token()
+ token_data['user_id'] = user_id
+ token_data['assignment_domain_id'] = domain_id
+ token_data['expires_at'] = future_time.replace(microsecond=0)
+
+ self._revoke_by_expiration(user_id, future_time, domain_id=domain_id)
+ self._assertTokenRevoked(token_data)
+
+ def remove_event(self, event):
+ self.events.remove(event)
+ self.tree.remove_event(event)
+
+ def test_by_project_grant(self):
+ token_to_revoke = self.token_to_revoke
+ tokens = self.project_tokens
+
+ self._assertTokenNotRevoked(token_to_revoke)
+ for token in tokens:
+ self._assertTokenNotRevoked(token)
+
+ event = self._revoke_by_grant(role_id=self.role_ids[0],
+ user_id=self.user_ids[0],
+ project_id=self.project_ids[0])
+
+ self._assertTokenRevoked(token_to_revoke)
+ for token in tokens:
+ self._assertTokenNotRevoked(token)
+
+ self.remove_event(event)
+
+ self._assertTokenNotRevoked(token_to_revoke)
+ for token in tokens:
+ self._assertTokenNotRevoked(token)
+
+ token_to_revoke['roles'] = [self.role_ids[0],
+ self.role_ids[1],
+ self.role_ids[2]]
+
+ event = self._revoke_by_grant(role_id=self.role_ids[0],
+ user_id=self.user_ids[0],
+ project_id=self.project_ids[0])
+ self._assertTokenRevoked(token_to_revoke)
+ self.remove_event(event)
+ self._assertTokenNotRevoked(token_to_revoke)
+
+ event = self._revoke_by_grant(role_id=self.role_ids[1],
+ user_id=self.user_ids[0],
+ project_id=self.project_ids[0])
+ self._assertTokenRevoked(token_to_revoke)
+ self.remove_event(event)
+ self._assertTokenNotRevoked(token_to_revoke)
+
+ self._revoke_by_grant(role_id=self.role_ids[0],
+ user_id=self.user_ids[0],
+ project_id=self.project_ids[0])
+ self._revoke_by_grant(role_id=self.role_ids[1],
+ user_id=self.user_ids[0],
+ project_id=self.project_ids[0])
+ self._revoke_by_grant(role_id=self.role_ids[2],
+ user_id=self.user_ids[0],
+ project_id=self.project_ids[0])
+ self._assertTokenRevoked(token_to_revoke)
+
+ def test_by_project_and_user_and_role(self):
+ user_id1 = _new_id()
+ user_id2 = _new_id()
+ project_id = _new_id()
+ self.events.append(self._revoke_by_user(user_id1))
+ self.events.append(
+ self._revoke_by_user_and_project(user_id2, project_id))
+ token_data = _sample_blank_token()
+ token_data['user_id'] = user_id2
+ token_data['project_id'] = project_id
+ self._assertTokenRevoked(token_data)
+
+ def test_by_domain_user(self):
+        # If a domain is revoked, a token for a user in the domain is revoked
+
+ user_id = _new_id()
+ domain_id = _new_id()
+
+ token_data = _sample_blank_token()
+ token_data['user_id'] = user_id
+ token_data['identity_domain_id'] = domain_id
+
+ self._revoke_by_domain(domain_id)
+
+ self._assertTokenRevoked(token_data)
+
+ def test_by_domain_project(self):
+        # If a domain is revoked, a token scoped to a project in the
+        # domain is revoked.
+
+ user_id = _new_id()
+ user_domain_id = _new_id()
+
+ project_id = _new_id()
+ project_domain_id = _new_id()
+
+ token_data = _sample_blank_token()
+ token_data['user_id'] = user_id
+ token_data['identity_domain_id'] = user_domain_id
+ token_data['project_id'] = project_id
+ token_data['assignment_domain_id'] = project_domain_id
+
+ self._revoke_by_domain(project_domain_id)
+
+ self._assertTokenRevoked(token_data)
+
+ def test_by_domain_domain(self):
+        # If a domain is revoked, a token scoped to the domain is revoked.
+
+ user_id = _new_id()
+ user_domain_id = _new_id()
+
+ domain_id = _new_id()
+
+ token_data = _sample_blank_token()
+ token_data['user_id'] = user_id
+ token_data['identity_domain_id'] = user_domain_id
+ token_data['assignment_domain_id'] = domain_id
+
+ self._revoke_by_domain(domain_id)
+
+ self._assertTokenRevoked(token_data)
+
+ def _assertEmpty(self, collection):
+ return self.assertEqual(0, len(collection), "collection not empty")
+
+ def _assertEventsMatchIteration(self, turn):
+ self.assertEqual(1, len(self.tree.revoke_map))
+ self.assertEqual(turn + 1, len(self.tree.revoke_map
+ ['trust_id=*']
+ ['consumer_id=*']
+ ['access_token_id=*']
+ ['audit_id=*']
+ ['audit_chain_id=*']))
+ # two different functions add domain_ids, +1 for None
+ self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
+ ['trust_id=*']
+ ['consumer_id=*']
+ ['access_token_id=*']
+ ['audit_id=*']
+ ['audit_chain_id=*']
+ ['expires_at=*']))
+ # two different functions add project_ids, +1 for None
+ self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
+ ['trust_id=*']
+ ['consumer_id=*']
+ ['access_token_id=*']
+ ['audit_id=*']
+ ['audit_chain_id=*']
+ ['expires_at=*']
+ ['domain_id=*']))
+ # 10 users added
+ self.assertEqual(turn, len(self.tree.revoke_map
+ ['trust_id=*']
+ ['consumer_id=*']
+ ['access_token_id=*']
+ ['audit_id=*']
+ ['audit_chain_id=*']
+ ['expires_at=*']
+ ['domain_id=*']
+ ['project_id=*']))
+
+ def test_cleanup(self):
+ events = self.events
+ self._assertEmpty(self.tree.revoke_map)
+ expiry_base_time = _future_time()
+ for i in range(0, 10):
+ events.append(
+ self._revoke_by_user(_new_id()))
+
+ args = (_new_id(),
+ expiry_base_time + datetime.timedelta(seconds=i))
+ events.append(
+ self._revoke_by_expiration(*args))
+
+ self.assertEqual(i + 2, len(self.tree.revoke_map
+ ['trust_id=*']
+ ['consumer_id=*']
+ ['access_token_id=*']
+ ['audit_id=*']
+ ['audit_chain_id=*']),
+ 'adding %s to %s' % (args,
+ self.tree.revoke_map))
+
+ events.append(
+ self._revoke_by_project_role_assignment(_new_id(), _new_id()))
+ events.append(
+ self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
+ events.append(
+ self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
+ events.append(
+ self._revoke_by_user_and_project(_new_id(), _new_id()))
+ self._assertEventsMatchIteration(i + 1)
+
+ for event in self.events:
+ self.tree.remove_event(event)
+ self._assertEmpty(self.tree.revoke_map)
diff --git a/keystone-moon/keystone/tests/unit/test_singular_plural.py b/keystone-moon/keystone/tests/unit/test_singular_plural.py
new file mode 100644
index 00000000..b07ea8d5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_singular_plural.py
@@ -0,0 +1,48 @@
+# Copyright 2012 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ast
+
+from keystone.contrib.admin_crud import core as admin_crud_core
+from keystone.contrib.s3 import core as s3_core
+from keystone.contrib.user_crud import core as user_crud_core
+from keystone.identity import core as identity_core
+from keystone import service
+
+
+class TestSingularPlural(object):
+ def test_keyword_arg_condition_or_methods(self):
+ """Raise if we see a keyword arg called 'condition' or 'methods'."""
+ modules = [admin_crud_core, s3_core,
+ user_crud_core, identity_core, service]
+ for module in modules:
+ filename = module.__file__
+ if filename.endswith(".pyc"):
+ # In Python 2, the .py and .pyc files are in the same dir.
+ filename = filename[:-1]
+ with open(filename) as fil:
+ source = fil.read()
+ module = ast.parse(source, filename)
+ last_stmt_or_expr = None
+ for node in ast.walk(module):
+ if isinstance(node, ast.stmt) or isinstance(node, ast.expr):
+ # keyword nodes don't have line numbers, so we need to
+ # get that information from the parent stmt or expr.
+ last_stmt_or_expr = node
+ elif isinstance(node, ast.keyword):
+ for bad_word in ["condition", "methods"]:
+ if node.arg == bad_word:
+ raise AssertionError(
+ "Suspicious name '%s' at %s line %s" %
+ (bad_word, filename, last_stmt_or_expr.lineno))
diff --git a/keystone-moon/keystone/tests/unit/test_sql_livetest.py b/keystone-moon/keystone/tests/unit/test_sql_livetest.py
new file mode 100644
index 00000000..96ee6c70
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_sql_livetest.py
@@ -0,0 +1,73 @@
+# Copyright 2013 Red Hat, Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_sql_migrate_extensions
+from keystone.tests.unit import test_sql_upgrade
+
+
+class PostgresqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
+ def setUp(self):
+ self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST')
+ super(PostgresqlMigrateTests, self).setUp()
+
+ def config_files(self):
+ files = super(PostgresqlMigrateTests, self).config_files()
+ files.append(tests.dirs.tests_conf("backend_postgresql.conf"))
+ return files
+
+
+class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
+ def setUp(self):
+ self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST')
+ super(MysqlMigrateTests, self).setUp()
+
+ def config_files(self):
+ files = super(MysqlMigrateTests, self).config_files()
+ files.append(tests.dirs.tests_conf("backend_mysql.conf"))
+ return files
+
+
+class PostgresqlRevokeExtensionsTests(
+ test_sql_migrate_extensions.RevokeExtension):
+ def setUp(self):
+ self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST')
+ super(PostgresqlRevokeExtensionsTests, self).setUp()
+
+ def config_files(self):
+ files = super(PostgresqlRevokeExtensionsTests, self).config_files()
+ files.append(tests.dirs.tests_conf("backend_postgresql.conf"))
+ return files
+
+
+class MysqlRevokeExtensionsTests(test_sql_migrate_extensions.RevokeExtension):
+ def setUp(self):
+ self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST')
+ super(MysqlRevokeExtensionsTests, self).setUp()
+
+ def config_files(self):
+ files = super(MysqlRevokeExtensionsTests, self).config_files()
+ files.append(tests.dirs.tests_conf("backend_mysql.conf"))
+ return files
+
+
+class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests):
+ def setUp(self):
+ self.skip_if_env_not_set('ENABLE_LIVE_DB2_TEST')
+ super(Db2MigrateTests, self).setUp()
+
+ def config_files(self):
+ files = super(Db2MigrateTests, self).config_files()
+ files.append(tests.dirs.tests_conf("backend_db2.conf"))
+ return files
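+
+
+# For reference, a minimal sketch (an assumption, not the actual keystone
+# test base class) of the gate that skip_if_env_not_set() applies in each
+# setUp() above.
+def _skip_if_env_not_set(test_case, env_var):
+    import os
+    if not os.environ.get(env_var):
+        test_case.skipTest('%s is not set; skipping live-database test'
+                           % env_var)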
diff --git a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
new file mode 100644
index 00000000..edfb91d7
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
@@ -0,0 +1,380 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+To run these tests against a live database:
+
+1. Modify the file `keystone/tests/unit/config_files/backend_sql.conf` to use
+ the connection for your live database.
+2. Set up a blank, live database.
+3. Run the tests using::
+
+ tox -e py27 -- keystone.tests.unit.test_sql_migrate_extensions
+
+WARNING::
+
+ Your database will be wiped.
+
+    Do not do this against a database with valuable data as
+ all data will be lost.
+"""
+
+import sqlalchemy
+import uuid
+
+from oslo_db import exception as db_exception
+from oslo_db.sqlalchemy import utils
+
+from keystone.contrib import endpoint_filter
+from keystone.contrib import endpoint_policy
+from keystone.contrib import example
+from keystone.contrib import federation
+from keystone.contrib import oauth1
+from keystone.contrib import revoke
+from keystone.tests.unit import test_sql_upgrade
+
+
+class SqlUpgradeExampleExtension(test_sql_upgrade.SqlMigrateBase):
+ def repo_package(self):
+ return example
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist('example')
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('example', ['id', 'type', 'extra'])
+
+ def test_downgrade(self):
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('example', ['id', 'type', 'extra'])
+ self.downgrade(0, repository=self.repo_path)
+ self.assertTableDoesNotExist('example')
+
+
+class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
+ def repo_package(self):
+ return oauth1
+
+ def upgrade(self, version):
+ super(SqlUpgradeOAuth1Extension, self).upgrade(
+ version, repository=self.repo_path)
+
+ def downgrade(self, version):
+ super(SqlUpgradeOAuth1Extension, self).downgrade(
+ version, repository=self.repo_path)
+
+ def _assert_v1_3_tables(self):
+ self.assertTableColumns('consumer',
+ ['id',
+ 'description',
+ 'secret',
+ 'extra'])
+ self.assertTableColumns('request_token',
+ ['id',
+ 'request_secret',
+ 'verifier',
+ 'authorizing_user_id',
+ 'requested_project_id',
+ 'requested_roles',
+ 'consumer_id',
+ 'expires_at'])
+ self.assertTableColumns('access_token',
+ ['id',
+ 'access_secret',
+ 'authorizing_user_id',
+ 'project_id',
+ 'requested_roles',
+ 'consumer_id',
+ 'expires_at'])
+
+ def _assert_v4_later_tables(self):
+ self.assertTableColumns('consumer',
+ ['id',
+ 'description',
+ 'secret',
+ 'extra'])
+ self.assertTableColumns('request_token',
+ ['id',
+ 'request_secret',
+ 'verifier',
+ 'authorizing_user_id',
+ 'requested_project_id',
+ 'role_ids',
+ 'consumer_id',
+ 'expires_at'])
+ self.assertTableColumns('access_token',
+ ['id',
+ 'access_secret',
+ 'authorizing_user_id',
+ 'project_id',
+ 'role_ids',
+ 'consumer_id',
+ 'expires_at'])
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist('consumer')
+ self.assertTableDoesNotExist('request_token')
+ self.assertTableDoesNotExist('access_token')
+ self.upgrade(1)
+ self._assert_v1_3_tables()
+
+ # NOTE(blk-u): Migrations 2-3 don't modify the tables in a way that we
+ # can easily test for.
+
+ self.upgrade(4)
+ self._assert_v4_later_tables()
+
+ self.upgrade(5)
+ self._assert_v4_later_tables()
+
+ def test_downgrade(self):
+ self.upgrade(5)
+ self._assert_v4_later_tables()
+ self.downgrade(3)
+ self._assert_v1_3_tables()
+ self.downgrade(1)
+ self._assert_v1_3_tables()
+ self.downgrade(0)
+ self.assertTableDoesNotExist('consumer')
+ self.assertTableDoesNotExist('request_token')
+ self.assertTableDoesNotExist('access_token')
+
+
+class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
+ def repo_package(self):
+ return endpoint_filter
+
+ def upgrade(self, version):
+ super(EndpointFilterExtension, self).upgrade(
+ version, repository=self.repo_path)
+
+ def downgrade(self, version):
+ super(EndpointFilterExtension, self).downgrade(
+ version, repository=self.repo_path)
+
+ def _assert_v1_tables(self):
+ self.assertTableColumns('project_endpoint',
+ ['endpoint_id', 'project_id'])
+ self.assertTableDoesNotExist('endpoint_group')
+ self.assertTableDoesNotExist('project_endpoint_group')
+
+ def _assert_v2_tables(self):
+ self.assertTableColumns('project_endpoint',
+ ['endpoint_id', 'project_id'])
+ self.assertTableColumns('endpoint_group',
+ ['id', 'name', 'description', 'filters'])
+ self.assertTableColumns('project_endpoint_group',
+ ['endpoint_group_id', 'project_id'])
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist('project_endpoint')
+ self.upgrade(1)
+ self._assert_v1_tables()
+ self.assertTableColumns('project_endpoint',
+ ['endpoint_id', 'project_id'])
+ self.upgrade(2)
+ self._assert_v2_tables()
+
+ def test_downgrade(self):
+ self.upgrade(2)
+ self._assert_v2_tables()
+ self.downgrade(1)
+ self._assert_v1_tables()
+ self.downgrade(0)
+ self.assertTableDoesNotExist('project_endpoint')
+
+
+class EndpointPolicyExtension(test_sql_upgrade.SqlMigrateBase):
+ def repo_package(self):
+ return endpoint_policy
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist('policy_association')
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('policy_association',
+ ['id', 'policy_id', 'endpoint_id',
+ 'service_id', 'region_id'])
+
+ def test_downgrade(self):
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('policy_association',
+ ['id', 'policy_id', 'endpoint_id',
+ 'service_id', 'region_id'])
+ self.downgrade(0, repository=self.repo_path)
+ self.assertTableDoesNotExist('policy_association')
+
+
+class FederationExtension(test_sql_upgrade.SqlMigrateBase):
+ """Test class for ensuring the Federation SQL."""
+
+ def setUp(self):
+ super(FederationExtension, self).setUp()
+ self.identity_provider = 'identity_provider'
+ self.federation_protocol = 'federation_protocol'
+ self.service_provider = 'service_provider'
+ self.mapping = 'mapping'
+
+ def repo_package(self):
+ return federation
+
+ def insert_dict(self, session, table_name, d):
+ """Naively inserts key-value pairs into a table, given a dictionary."""
+ table = sqlalchemy.Table(table_name, self.metadata, autoload=True)
+ insert = table.insert().values(**d)
+ session.execute(insert)
+ session.commit()
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist(self.identity_provider)
+ self.assertTableDoesNotExist(self.federation_protocol)
+ self.assertTableDoesNotExist(self.mapping)
+
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns(self.identity_provider,
+ ['id',
+ 'enabled',
+ 'description'])
+
+ self.assertTableColumns(self.federation_protocol,
+ ['id',
+ 'idp_id',
+ 'mapping_id'])
+
+ self.upgrade(2, repository=self.repo_path)
+ self.assertTableColumns(self.mapping,
+ ['id', 'rules'])
+
+ federation_protocol = utils.get_table(
+ self.engine,
+ 'federation_protocol')
+ with self.engine.begin() as conn:
+ conn.execute(federation_protocol.insert(), id=0, idp_id=1)
+ self.upgrade(3, repository=self.repo_path)
+ federation_protocol = utils.get_table(
+ self.engine,
+ 'federation_protocol')
+ self.assertFalse(federation_protocol.c.mapping_id.nullable)
+
+ def test_downgrade(self):
+ self.upgrade(3, repository=self.repo_path)
+ self.assertTableColumns(self.identity_provider,
+ ['id', 'enabled', 'description'])
+ self.assertTableColumns(self.federation_protocol,
+ ['id', 'idp_id', 'mapping_id'])
+ self.assertTableColumns(self.mapping,
+ ['id', 'rules'])
+
+ self.downgrade(2, repository=self.repo_path)
+ federation_protocol = utils.get_table(
+ self.engine,
+ 'federation_protocol')
+ self.assertTrue(federation_protocol.c.mapping_id.nullable)
+
+ self.downgrade(0, repository=self.repo_path)
+ self.assertTableDoesNotExist(self.identity_provider)
+ self.assertTableDoesNotExist(self.federation_protocol)
+ self.assertTableDoesNotExist(self.mapping)
+
+ def test_fixup_service_provider_attributes(self):
+ self.upgrade(6, repository=self.repo_path)
+ self.assertTableColumns(self.service_provider,
+ ['id', 'description', 'enabled', 'auth_url',
+ 'sp_url'])
+
+ session = self.Session()
+ sp1 = {'id': uuid.uuid4().hex,
+ 'auth_url': None,
+ 'sp_url': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+ sp2 = {'id': uuid.uuid4().hex,
+ 'auth_url': uuid.uuid4().hex,
+ 'sp_url': None,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+ sp3 = {'id': uuid.uuid4().hex,
+ 'auth_url': None,
+ 'sp_url': None,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+        # Inserting with 'auth_url' or 'sp_url' set to null must fail.
+ self.assertRaises(db_exception.DBError,
+ self.insert_dict,
+ session,
+ self.service_provider,
+ sp1)
+ self.assertRaises(db_exception.DBError,
+ self.insert_dict,
+ session,
+ self.service_provider,
+ sp2)
+ self.assertRaises(db_exception.DBError,
+ self.insert_dict,
+ session,
+ self.service_provider,
+ sp3)
+
+ session.close()
+ self.downgrade(5, repository=self.repo_path)
+ self.assertTableColumns(self.service_provider,
+ ['id', 'description', 'enabled', 'auth_url',
+ 'sp_url'])
+ session = self.Session()
+ self.metadata.clear()
+
+ # Before the migration, the table should accept null values
+ self.insert_dict(session, self.service_provider, sp1)
+ self.insert_dict(session, self.service_provider, sp2)
+ self.insert_dict(session, self.service_provider, sp3)
+
+        # Check that null values are updated to empty strings when migrating.
+ session.close()
+ self.upgrade(6, repository=self.repo_path)
+ sp_table = sqlalchemy.Table(self.service_provider,
+ self.metadata,
+ autoload=True)
+ session = self.Session()
+ self.metadata.clear()
+
+ sp = session.query(sp_table).filter(sp_table.c.id == sp1['id'])[0]
+ self.assertEqual('', sp.auth_url)
+
+ sp = session.query(sp_table).filter(sp_table.c.id == sp2['id'])[0]
+ self.assertEqual('', sp.sp_url)
+
+ sp = session.query(sp_table).filter(sp_table.c.id == sp3['id'])[0]
+ self.assertEqual('', sp.auth_url)
+ self.assertEqual('', sp.sp_url)
+
+_REVOKE_COLUMN_NAMES = ['id', 'domain_id', 'project_id', 'user_id', 'role_id',
+ 'trust_id', 'consumer_id', 'access_token_id',
+ 'issued_before', 'expires_at', 'revoked_at']
+
+
+class RevokeExtension(test_sql_upgrade.SqlMigrateBase):
+
+ def repo_package(self):
+ return revoke
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist('revocation_event')
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('revocation_event',
+ _REVOKE_COLUMN_NAMES)
+
+ def test_downgrade(self):
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('revocation_event',
+ _REVOKE_COLUMN_NAMES)
+ self.downgrade(0, repository=self.repo_path)
+ self.assertTableDoesNotExist('revocation_event')
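+
+
+# A standalone sketch (for illustration; not used by the tests) of the naive
+# insert pattern that FederationExtension.insert_dict() uses above.
+def _insert_row(session, metadata, table_name, values):
+    table = sqlalchemy.Table(table_name, metadata, autoload=True)
+    session.execute(table.insert().values(**values))
+    session.commit()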
diff --git a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
new file mode 100644
index 00000000..e50bad56
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
@@ -0,0 +1,957 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+To run these tests against a live database:
+
+1. Modify the file ``keystone/tests/unit/config_files/backend_sql.conf`` to use
+ the connection for your live database.
+2. Set up a blank, live database
+3. Run the tests using::
+
+ tox -e py27 -- keystone.tests.unit.test_sql_upgrade
+
+WARNING::
+
+ Your database will be wiped.
+
+ Do not do this against a database with valuable data as
+ all data will be lost.
+"""
+
+import copy
+import json
+import uuid
+
+from migrate.versioning import api as versioning_api
+from oslo_config import cfg
+from oslo_db import exception as db_exception
+from oslo_db.sqlalchemy import migration
+from oslo_db.sqlalchemy import session as db_session
+import six
+from sqlalchemy.engine import reflection
+import sqlalchemy.exc
+from sqlalchemy import schema
+
+from keystone.common import sql
+from keystone.common.sql import migrate_repo
+from keystone.common.sql import migration_helpers
+from keystone.contrib import federation
+from keystone.contrib import revoke
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+# NOTE(morganfainberg): This should be updated when each DB migration collapse
+# is done to mirror the expected structure of the DB in the format of
+# { <DB_TABLE_NAME>: [<COLUMN>, <COLUMN>, ...], ... }
+INITIAL_TABLE_STRUCTURE = {
+ 'credential': [
+ 'id', 'user_id', 'project_id', 'blob', 'type', 'extra',
+ ],
+ 'domain': [
+ 'id', 'name', 'enabled', 'extra',
+ ],
+ 'endpoint': [
+ 'id', 'legacy_endpoint_id', 'interface', 'region', 'service_id', 'url',
+ 'enabled', 'extra',
+ ],
+ 'group': [
+ 'id', 'domain_id', 'name', 'description', 'extra',
+ ],
+ 'policy': [
+ 'id', 'type', 'blob', 'extra',
+ ],
+ 'project': [
+ 'id', 'name', 'extra', 'description', 'enabled', 'domain_id',
+ ],
+ 'role': [
+ 'id', 'name', 'extra',
+ ],
+ 'service': [
+ 'id', 'type', 'extra', 'enabled',
+ ],
+ 'token': [
+ 'id', 'expires', 'extra', 'valid', 'trust_id', 'user_id',
+ ],
+ 'trust': [
+ 'id', 'trustor_user_id', 'trustee_user_id', 'project_id',
+ 'impersonation', 'deleted_at', 'expires_at', 'remaining_uses', 'extra',
+ ],
+ 'trust_role': [
+ 'trust_id', 'role_id',
+ ],
+ 'user': [
+ 'id', 'name', 'extra', 'password', 'enabled', 'domain_id',
+ 'default_project_id',
+ ],
+ 'user_group_membership': [
+ 'user_id', 'group_id',
+ ],
+ 'region': [
+ 'id', 'description', 'parent_region_id', 'extra',
+ ],
+ 'assignment': [
+ 'type', 'actor_id', 'target_id', 'role_id', 'inherited',
+ ],
+}
+
+
+INITIAL_EXTENSION_TABLE_STRUCTURE = {
+ 'revocation_event': [
+ 'id', 'domain_id', 'project_id', 'user_id', 'role_id',
+ 'trust_id', 'consumer_id', 'access_token_id',
+ 'issued_before', 'expires_at', 'revoked_at', 'audit_id',
+ 'audit_chain_id',
+ ],
+}
+
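+
+# A short standalone sketch (for illustration; not used by the tests) of how
+# a structure map like the two above can be checked against a live engine
+# via reflection, mirroring what assertTableColumns() does per table.
+def _check_structure(engine, expected_structure):
+    meta = sqlalchemy.MetaData()
+    meta.bind = engine
+    for table_name, expected_cols in expected_structure.items():
+        table = sqlalchemy.Table(table_name, meta, autoload=True)
+        actual_cols = sorted(col.name for col in table.columns)
+        assert sorted(expected_cols) == actual_cols, table_name
+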
+EXTENSIONS = {'federation': federation,
+ 'revoke': revoke}
+
+
+class SqlMigrateBase(tests.SQLDriverOverrides, tests.TestCase):
+ def initialize_sql(self):
+ self.metadata = sqlalchemy.MetaData()
+ self.metadata.bind = self.engine
+
+ def config_files(self):
+ config_files = super(SqlMigrateBase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def repo_package(self):
+ return sql
+
+ def setUp(self):
+ super(SqlMigrateBase, self).setUp()
+ database.initialize_sql_session()
+ conn_str = CONF.database.connection
+ if (conn_str != tests.IN_MEM_DB_CONN_STRING and
+ conn_str.startswith('sqlite') and
+ conn_str[10:] == tests.DEFAULT_TEST_DB_FILE):
+ # Override the default with a DB that is specific to the migration
+            # tests only if the DB connection string is the same as the
+            # global default. This is required so that no conflicts occur
+            # due to the global default DB already being under migrate
+            # control. This is only needed if the DB is not in-memory.
+ db_file = tests.dirs.tmp('keystone_migrate_test.db')
+ self.config_fixture.config(
+ group='database',
+ connection='sqlite:///%s' % db_file)
+
+ # create and share a single sqlalchemy engine for testing
+ self.engine = sql.get_engine()
+ self.Session = db_session.get_maker(self.engine, autocommit=False)
+
+ self.initialize_sql()
+ self.repo_path = migration_helpers.find_migrate_repo(
+ self.repo_package())
+ self.schema = versioning_api.ControlledSchema.create(
+ self.engine,
+ self.repo_path, self.initial_db_version)
+
+ # auto-detect the highest available schema version in the migrate_repo
+ self.max_version = self.schema.repository.version().version
+
+ def tearDown(self):
+ sqlalchemy.orm.session.Session.close_all()
+ meta = sqlalchemy.MetaData()
+ meta.bind = self.engine
+ meta.reflect(self.engine)
+
+ with self.engine.begin() as conn:
+ inspector = reflection.Inspector.from_engine(self.engine)
+ metadata = schema.MetaData()
+ tbs = []
+ all_fks = []
+
+ for table_name in inspector.get_table_names():
+ fks = []
+ for fk in inspector.get_foreign_keys(table_name):
+ if not fk['name']:
+ continue
+ fks.append(
+ schema.ForeignKeyConstraint((), (), name=fk['name']))
+ table = schema.Table(table_name, metadata, *fks)
+ tbs.append(table)
+ all_fks.extend(fks)
+
+ for fkc in all_fks:
+ conn.execute(schema.DropConstraint(fkc))
+
+ for table in tbs:
+ conn.execute(schema.DropTable(table))
+
+ sql.cleanup()
+ super(SqlMigrateBase, self).tearDown()
+
+ def select_table(self, name):
+ table = sqlalchemy.Table(name,
+ self.metadata,
+ autoload=True)
+ s = sqlalchemy.select([table])
+ return s
+
+ def assertTableExists(self, table_name):
+ try:
+ self.select_table(table_name)
+ except sqlalchemy.exc.NoSuchTableError:
+ raise AssertionError('Table "%s" does not exist' % table_name)
+
+ def assertTableDoesNotExist(self, table_name):
+ """Asserts that a given table exists cannot be selected by name."""
+ # Switch to a different metadata otherwise you might still
+ # detect renamed or dropped tables
+ try:
+ temp_metadata = sqlalchemy.MetaData()
+ temp_metadata.bind = self.engine
+ sqlalchemy.Table(table_name, temp_metadata, autoload=True)
+ except sqlalchemy.exc.NoSuchTableError:
+ pass
+ else:
+ raise AssertionError('Table "%s" already exists' % table_name)
+
+ def upgrade(self, *args, **kwargs):
+ self._migrate(*args, **kwargs)
+
+ def downgrade(self, *args, **kwargs):
+ self._migrate(*args, downgrade=True, **kwargs)
+
+ def _migrate(self, version, repository=None, downgrade=False,
+ current_schema=None):
+ repository = repository or self.repo_path
+ err = ''
+ version = versioning_api._migrate_version(self.schema,
+ version,
+ not downgrade,
+ err)
+ if not current_schema:
+ current_schema = self.schema
+ changeset = current_schema.changeset(version)
+ for ver, change in changeset:
+ self.schema.runchange(ver, change, changeset.step)
+ self.assertEqual(self.schema.version, version)
+
+ def assertTableColumns(self, table_name, expected_cols):
+ """Asserts that the table contains the expected set of columns."""
+ self.initialize_sql()
+ table = self.select_table(table_name)
+ actual_cols = [col.name for col in table.columns]
+ # Check if the columns are equal, but allow for a different order,
+ # which might occur after an upgrade followed by a downgrade
+ self.assertItemsEqual(expected_cols, actual_cols,
+ '%s table' % table_name)
+
+ @property
+ def initial_db_version(self):
+ return getattr(self, '_initial_db_version', 0)
+
+
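+# An illustrative sketch of the underlying sqlalchemy-migrate calls that
+# _migrate() above drives through ControlledSchema; the database URL and
+# repository path here are assumptions, not project defaults.
+def _example_sync(db_url='sqlite:////tmp/example.db',
+                  repo='/path/to/migrate_repo'):
+    versioning_api.version_control(db_url, repo, version=0)
+    versioning_api.upgrade(db_url, repo, version=2)
+    versioning_api.downgrade(db_url, repo, version=1)
+
+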
+class SqlUpgradeTests(SqlMigrateBase):
+
+ _initial_db_version = migrate_repo.DB_INIT_VERSION
+
+ def test_blank_db_to_start(self):
+ self.assertTableDoesNotExist('user')
+
+ def test_start_version_db_init_version(self):
+ version = migration.db_version(sql.get_engine(), self.repo_path,
+ migrate_repo.DB_INIT_VERSION)
+ self.assertEqual(
+ migrate_repo.DB_INIT_VERSION,
+ version,
+ 'DB is not at version %s' % migrate_repo.DB_INIT_VERSION)
+
+ def test_two_steps_forward_one_step_back(self):
+ """You should be able to cleanly undo and re-apply all upgrades.
+
+ Upgrades are run in the following order::
+
+ Starting with the initial version defined at
+ keystone.common.migrate_repo.DB_INIT_VERSION
+
+ INIT +1 -> INIT +2 -> INIT +1 -> INIT +2 -> INIT +3 -> INIT +2 ...
+ ^---------------------^ ^---------------------^
+
+        Downgrade to DB_INIT_VERSION does not occur, because migrate
+        requires the base version to be DB_INIT_VERSION + 1 before a
+        migration can occur. Downgrade below DB_INIT_VERSION + 1 is no
+        longer supported.
+
+        DB_INIT_VERSION is the number preceding the release schema version
+        from two releases prior. For example, Juno releases with a
+        DB_INIT_VERSION of 35, because Havana (two releases before Juno)
+        has a release schema version of 36.
+
+        The migrate utility requires that the db be initialized under
+        version control at the revision directly before the first version
+        to be applied.
+
+ """
+ for x in range(migrate_repo.DB_INIT_VERSION + 1,
+ self.max_version + 1):
+ self.upgrade(x)
+ downgrade_ver = x - 1
+ # Don't actually downgrade to the init version. This will raise
+ # a not-implemented error.
+ if downgrade_ver != migrate_repo.DB_INIT_VERSION:
+ self.downgrade(x - 1)
+ self.upgrade(x)
+
+ def test_upgrade_add_initial_tables(self):
+ self.upgrade(migrate_repo.DB_INIT_VERSION + 1)
+ self.check_initial_table_structure()
+
+ def check_initial_table_structure(self):
+ for table in INITIAL_TABLE_STRUCTURE:
+ self.assertTableColumns(table, INITIAL_TABLE_STRUCTURE[table])
+
+ # Ensure the default domain was properly created.
+ default_domain = migration_helpers.get_default_domain()
+
+ meta = sqlalchemy.MetaData()
+ meta.bind = self.engine
+
+ domain_table = sqlalchemy.Table('domain', meta, autoload=True)
+
+ session = self.Session()
+ q = session.query(domain_table)
+ refs = q.all()
+
+ self.assertEqual(1, len(refs))
+ for k in default_domain.keys():
+ self.assertEqual(default_domain[k], getattr(refs[0], k))
+
+ def test_downgrade_to_db_init_version(self):
+ self.upgrade(self.max_version)
+
+ if self.engine.name == 'mysql':
+ self._mysql_check_all_tables_innodb()
+
+ self.downgrade(migrate_repo.DB_INIT_VERSION + 1)
+ self.check_initial_table_structure()
+
+ meta = sqlalchemy.MetaData()
+ meta.bind = self.engine
+ meta.reflect(self.engine)
+
+ initial_table_set = set(INITIAL_TABLE_STRUCTURE.keys())
+ table_set = set(meta.tables.keys())
+ # explicitly remove the migrate_version table, this is not controlled
+ # by the migration scripts and should be exempt from this check.
+ table_set.remove('migrate_version')
+
+ self.assertSetEqual(initial_table_set, table_set)
+ # Downgrade to before Icehouse's release schema version (044) is not
+ # supported. A NotImplementedError should be raised when attempting to
+ # downgrade.
+ self.assertRaises(NotImplementedError, self.downgrade,
+ migrate_repo.DB_INIT_VERSION)
+
+ def insert_dict(self, session, table_name, d, table=None):
+ """Naively inserts key-value pairs into a table, given a dictionary."""
+ if table is None:
+ this_table = sqlalchemy.Table(table_name, self.metadata,
+ autoload=True)
+ else:
+ this_table = table
+ insert = this_table.insert().values(**d)
+ session.execute(insert)
+ session.commit()
+
+ def test_id_mapping(self):
+ self.upgrade(50)
+ self.assertTableDoesNotExist('id_mapping')
+ self.upgrade(51)
+ self.assertTableExists('id_mapping')
+ self.downgrade(50)
+ self.assertTableDoesNotExist('id_mapping')
+
+ def test_region_url_upgrade(self):
+ self.upgrade(52)
+ self.assertTableColumns('region',
+ ['id', 'description', 'parent_region_id',
+ 'extra', 'url'])
+
+ def test_region_url_downgrade(self):
+ self.upgrade(52)
+ self.downgrade(51)
+ self.assertTableColumns('region',
+ ['id', 'description', 'parent_region_id',
+ 'extra'])
+
+ def test_region_url_cleanup(self):
+ # make sure that the url field is dropped in the downgrade
+ self.upgrade(52)
+ session = self.Session()
+ beta = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'parent_region_id': uuid.uuid4().hex,
+ 'url': uuid.uuid4().hex
+ }
+ acme = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'parent_region_id': uuid.uuid4().hex,
+ 'url': None
+ }
+ self.insert_dict(session, 'region', beta)
+ self.insert_dict(session, 'region', acme)
+ region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(2, session.query(region_table).count())
+ session.close()
+ self.downgrade(51)
+ session = self.Session()
+ self.metadata.clear()
+ region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(2, session.query(region_table).count())
+ region = session.query(region_table)[0]
+ self.assertRaises(AttributeError, getattr, region, 'url')
+
+ def test_endpoint_region_upgrade_columns(self):
+ self.upgrade(53)
+ self.assertTableColumns('endpoint',
+ ['id', 'legacy_endpoint_id', 'interface',
+ 'service_id', 'url', 'extra', 'enabled',
+ 'region_id'])
+ region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(255, region_table.c.id.type.length)
+ self.assertEqual(255, region_table.c.parent_region_id.type.length)
+ endpoint_table = sqlalchemy.Table('endpoint',
+ self.metadata,
+ autoload=True)
+ self.assertEqual(255, endpoint_table.c.region_id.type.length)
+
+ def test_endpoint_region_downgrade_columns(self):
+ self.upgrade(53)
+ self.downgrade(52)
+ self.assertTableColumns('endpoint',
+ ['id', 'legacy_endpoint_id', 'interface',
+ 'service_id', 'url', 'extra', 'enabled',
+ 'region'])
+ region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(64, region_table.c.id.type.length)
+ self.assertEqual(64, region_table.c.parent_region_id.type.length)
+ endpoint_table = sqlalchemy.Table('endpoint',
+ self.metadata,
+ autoload=True)
+ self.assertEqual(255, endpoint_table.c.region.type.length)
+
+ def test_endpoint_region_migration(self):
+ self.upgrade(52)
+ session = self.Session()
+ _small_region_name = '0' * 30
+ _long_region_name = '0' * 255
+ _clashing_region_name = '0' * 70
+
+ def add_service():
+ service_id = uuid.uuid4().hex
+
+ service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex
+ }
+
+ self.insert_dict(session, 'service', service)
+
+ return service_id
+
+ def add_endpoint(service_id, region):
+ endpoint_id = uuid.uuid4().hex
+
+ endpoint = {
+ 'id': endpoint_id,
+ 'interface': uuid.uuid4().hex[:8],
+ 'service_id': service_id,
+ 'url': uuid.uuid4().hex,
+ 'region': region
+ }
+ self.insert_dict(session, 'endpoint', endpoint)
+
+ return endpoint_id
+
+ _service_id_ = add_service()
+ add_endpoint(_service_id_, region=_long_region_name)
+ add_endpoint(_service_id_, region=_long_region_name)
+ add_endpoint(_service_id_, region=_clashing_region_name)
+ add_endpoint(_service_id_, region=_small_region_name)
+ add_endpoint(_service_id_, region=None)
+
+ # upgrade to 53
+ session.close()
+ self.upgrade(53)
+ session = self.Session()
+ self.metadata.clear()
+
+ region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(1, session.query(region_table).
+ filter_by(id=_long_region_name).count())
+ self.assertEqual(1, session.query(region_table).
+ filter_by(id=_clashing_region_name).count())
+ self.assertEqual(1, session.query(region_table).
+ filter_by(id=_small_region_name).count())
+
+ endpoint_table = sqlalchemy.Table('endpoint',
+ self.metadata,
+ autoload=True)
+ self.assertEqual(5, session.query(endpoint_table).count())
+ self.assertEqual(2, session.query(endpoint_table).
+ filter_by(region_id=_long_region_name).count())
+ self.assertEqual(1, session.query(endpoint_table).
+ filter_by(region_id=_clashing_region_name).count())
+ self.assertEqual(1, session.query(endpoint_table).
+ filter_by(region_id=_small_region_name).count())
+
+ # downgrade to 52
+ session.close()
+ self.downgrade(52)
+ session = self.Session()
+ self.metadata.clear()
+
+ region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(1, session.query(region_table).count())
+ self.assertEqual(1, session.query(region_table).
+ filter_by(id=_small_region_name).count())
+
+ endpoint_table = sqlalchemy.Table('endpoint',
+ self.metadata,
+ autoload=True)
+ self.assertEqual(5, session.query(endpoint_table).count())
+ self.assertEqual(2, session.query(endpoint_table).
+ filter_by(region=_long_region_name).count())
+ self.assertEqual(1, session.query(endpoint_table).
+ filter_by(region=_clashing_region_name).count())
+ self.assertEqual(1, session.query(endpoint_table).
+ filter_by(region=_small_region_name).count())
+
+ def test_add_actor_id_index(self):
+ self.upgrade(53)
+ self.upgrade(54)
+ table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
+ index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+ self.assertIn(('ix_actor_id', ['actor_id']), index_data)
+
+ def test_token_user_id_and_trust_id_index_upgrade(self):
+ self.upgrade(54)
+ self.upgrade(55)
+ table = sqlalchemy.Table('token', self.metadata, autoload=True)
+ index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+ self.assertIn(('ix_token_user_id', ['user_id']), index_data)
+ self.assertIn(('ix_token_trust_id', ['trust_id']), index_data)
+
+ def test_token_user_id_and_trust_id_index_downgrade(self):
+ self.upgrade(55)
+ self.downgrade(54)
+ table = sqlalchemy.Table('token', self.metadata, autoload=True)
+ index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+ self.assertNotIn(('ix_token_user_id', ['user_id']), index_data)
+ self.assertNotIn(('ix_token_trust_id', ['trust_id']), index_data)
+
+ def test_remove_actor_id_index(self):
+ self.upgrade(54)
+ self.downgrade(53)
+ table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
+ index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+ self.assertNotIn(('ix_actor_id', ['actor_id']), index_data)
+
+ def test_project_parent_id_upgrade(self):
+ self.upgrade(61)
+ self.assertTableColumns('project',
+ ['id', 'name', 'extra', 'description',
+ 'enabled', 'domain_id', 'parent_id'])
+
+ def test_project_parent_id_downgrade(self):
+ self.upgrade(61)
+ self.downgrade(60)
+ self.assertTableColumns('project',
+ ['id', 'name', 'extra', 'description',
+ 'enabled', 'domain_id'])
+
+ def test_project_parent_id_cleanup(self):
+ # make sure that the parent_id field is dropped in the downgrade
+ self.upgrade(61)
+ session = self.Session()
+ domain = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ acme = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None
+ }
+ beta = {
+ 'id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex,
+ 'parent_id': acme['id']
+ }
+ self.insert_dict(session, 'domain', domain)
+ self.insert_dict(session, 'project', acme)
+ self.insert_dict(session, 'project', beta)
+ proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
+ self.assertEqual(2, session.query(proj_table).count())
+ session.close()
+ self.downgrade(60)
+ session = self.Session()
+ self.metadata.clear()
+ proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
+ self.assertEqual(2, session.query(proj_table).count())
+ project = session.query(proj_table)[0]
+ self.assertRaises(AttributeError, getattr, project, 'parent_id')
+
+ def test_drop_assignment_role_fk(self):
+ self.upgrade(61)
+ self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
+ self.upgrade(62)
+ if self.engine.name != 'sqlite':
+ # sqlite does not support FK deletions (or enforcement)
+ self.assertFalse(self.does_fk_exist('assignment', 'role_id'))
+ self.downgrade(61)
+ self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
+
+ def does_fk_exist(self, table, fk_column):
+ inspector = reflection.Inspector.from_engine(self.engine)
+ for fk in inspector.get_foreign_keys(table):
+ if fk_column in fk['constrained_columns']:
+ return True
+ return False
+
+ def test_drop_region_url_upgrade(self):
+ self.upgrade(63)
+ self.assertTableColumns('region',
+ ['id', 'description', 'parent_region_id',
+ 'extra'])
+
+ def test_drop_region_url_downgrade(self):
+ self.upgrade(63)
+ self.downgrade(62)
+ self.assertTableColumns('region',
+ ['id', 'description', 'parent_region_id',
+ 'extra', 'url'])
+
+ def test_drop_domain_fk(self):
+ self.upgrade(63)
+ self.assertTrue(self.does_fk_exist('group', 'domain_id'))
+ self.assertTrue(self.does_fk_exist('user', 'domain_id'))
+ self.upgrade(64)
+ if self.engine.name != 'sqlite':
+ # sqlite does not support FK deletions (or enforcement)
+ self.assertFalse(self.does_fk_exist('group', 'domain_id'))
+ self.assertFalse(self.does_fk_exist('user', 'domain_id'))
+ self.downgrade(63)
+ self.assertTrue(self.does_fk_exist('group', 'domain_id'))
+ self.assertTrue(self.does_fk_exist('user', 'domain_id'))
+
+ def test_add_domain_config(self):
+ whitelisted_table = 'whitelisted_config'
+ sensitive_table = 'sensitive_config'
+ self.upgrade(64)
+ self.assertTableDoesNotExist(whitelisted_table)
+ self.assertTableDoesNotExist(sensitive_table)
+ self.upgrade(65)
+ self.assertTableColumns(whitelisted_table,
+ ['domain_id', 'group', 'option', 'value'])
+ self.assertTableColumns(sensitive_table,
+ ['domain_id', 'group', 'option', 'value'])
+ self.downgrade(64)
+ self.assertTableDoesNotExist(whitelisted_table)
+ self.assertTableDoesNotExist(sensitive_table)
+
+ def test_fixup_service_name_value_upgrade(self):
+ """Update service name data from `extra` to empty string."""
+ def add_service(**extra_data):
+ service_id = uuid.uuid4().hex
+
+ service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex,
+ 'extra': json.dumps(extra_data),
+ }
+
+ self.insert_dict(session, 'service', service)
+
+ return service_id
+
+ self.upgrade(65)
+ session = self.Session()
+
+        # Services whose extra values carry a random attribute plus
+        # different combinations of the name field.
+ random_attr_name = uuid.uuid4().hex
+ random_attr_value = uuid.uuid4().hex
+ random_attr_str = "%s='%s'" % (random_attr_name, random_attr_value)
+ random_attr_no_name = {random_attr_name: random_attr_value}
+ random_attr_no_name_str = "%s='%s'" % (random_attr_name,
+ random_attr_value)
+ random_attr_name_value = {random_attr_name: random_attr_value,
+ 'name': 'myname'}
+ random_attr_name_value_str = 'name=myname,%s' % random_attr_str
+ random_attr_name_empty = {random_attr_name: random_attr_value,
+ 'name': ''}
+ random_attr_name_empty_str = 'name=,%s' % random_attr_str
+ random_attr_name_none = {random_attr_name: random_attr_value,
+ 'name': None}
+ random_attr_name_none_str = 'name=None,%s' % random_attr_str
+
+ services = [
+ (add_service(**random_attr_no_name),
+ random_attr_name_empty, random_attr_no_name_str),
+ (add_service(**random_attr_name_value),
+ random_attr_name_value, random_attr_name_value_str),
+ (add_service(**random_attr_name_empty),
+ random_attr_name_empty, random_attr_name_empty_str),
+ (add_service(**random_attr_name_none),
+ random_attr_name_empty, random_attr_name_none_str),
+ ]
+
+ session.close()
+ self.upgrade(66)
+ session = self.Session()
+
+ # Verify that the services have the expected values.
+ self.metadata.clear()
+ service_table = sqlalchemy.Table('service', self.metadata,
+ autoload=True)
+
+ def fetch_service_extra(service_id):
+ cols = [service_table.c.extra]
+ f = service_table.c.id == service_id
+ s = sqlalchemy.select(cols).where(f)
+ service = session.execute(s).fetchone()
+ return json.loads(service.extra)
+
+ for service_id, exp_extra, msg in services:
+ extra = fetch_service_extra(service_id)
+ self.assertDictEqual(exp_extra, extra, msg)
+
+ def populate_user_table(self, with_pass_enab=False,
+ with_pass_enab_domain=False):
+ # Populate the appropriate fields in the user
+ # table, depending on the parameters:
+ #
+ # Default: id, name, extra
+ # pass_enab: Add password, enabled as well
+ # pass_enab_domain: Add password, enabled and domain as well
+ #
+ this_table = sqlalchemy.Table("user",
+ self.metadata,
+ autoload=True)
+ for user in default_fixtures.USERS:
+ extra = copy.deepcopy(user)
+ extra.pop('id')
+ extra.pop('name')
+
+ if with_pass_enab:
+ password = extra.pop('password', None)
+ enabled = extra.pop('enabled', True)
+ ins = this_table.insert().values(
+ {'id': user['id'],
+ 'name': user['name'],
+ 'password': password,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ if with_pass_enab_domain:
+ password = extra.pop('password', None)
+ enabled = extra.pop('enabled', True)
+ extra.pop('domain_id')
+ ins = this_table.insert().values(
+ {'id': user['id'],
+ 'name': user['name'],
+ 'domain_id': user['domain_id'],
+ 'password': password,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ ins = this_table.insert().values(
+ {'id': user['id'],
+ 'name': user['name'],
+ 'extra': json.dumps(extra)})
+ self.engine.execute(ins)
+
+ def populate_tenant_table(self, with_desc_enab=False,
+ with_desc_enab_domain=False):
+ # Populate the appropriate fields in the tenant or
+ # project table, depending on the parameters
+ #
+ # Default: id, name, extra
+ # desc_enab: Add description, enabled as well
+ # desc_enab_domain: Add description, enabled and domain as well,
+ # plus use project instead of tenant
+ #
+ if with_desc_enab_domain:
+ # By this time tenants are now projects
+ this_table = sqlalchemy.Table("project",
+ self.metadata,
+ autoload=True)
+ else:
+ this_table = sqlalchemy.Table("tenant",
+ self.metadata,
+ autoload=True)
+
+ for tenant in default_fixtures.TENANTS:
+ extra = copy.deepcopy(tenant)
+ extra.pop('id')
+ extra.pop('name')
+
+ if with_desc_enab:
+ desc = extra.pop('description', None)
+ enabled = extra.pop('enabled', True)
+ ins = this_table.insert().values(
+ {'id': tenant['id'],
+ 'name': tenant['name'],
+ 'description': desc,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ if with_desc_enab_domain:
+ desc = extra.pop('description', None)
+ enabled = extra.pop('enabled', True)
+ extra.pop('domain_id')
+ ins = this_table.insert().values(
+ {'id': tenant['id'],
+ 'name': tenant['name'],
+ 'domain_id': tenant['domain_id'],
+ 'description': desc,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ ins = this_table.insert().values(
+ {'id': tenant['id'],
+ 'name': tenant['name'],
+ 'extra': json.dumps(extra)})
+ self.engine.execute(ins)
+
+ def _mysql_check_all_tables_innodb(self):
+ database = self.engine.url.database
+
+ connection = self.engine.connect()
+ # sanity check
+ total = connection.execute("SELECT count(*) "
+ "from information_schema.TABLES "
+ "where TABLE_SCHEMA='%(database)s'" %
+ dict(database=database))
+ self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
+
+ noninnodb = connection.execute("SELECT table_name "
+ "from information_schema.TABLES "
+ "where TABLE_SCHEMA='%(database)s' "
+ "and ENGINE!='InnoDB' "
+ "and TABLE_NAME!='migrate_version'" %
+ dict(database=database))
+ names = [x[0] for x in noninnodb]
+ self.assertEqual([], names,
+ "Non-InnoDB tables exist")
+
+ connection.close()
+
+
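+# A standalone sketch of the reflection technique used by does_fk_exist()
+# in SqlUpgradeTests above; `engine` is any bound SQLAlchemy engine.
+def _list_fk_columns(engine, table_name):
+    inspector = reflection.Inspector.from_engine(engine)
+    return [column
+            for fk in inspector.get_foreign_keys(table_name)
+            for column in fk['constrained_columns']]
+
+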
+class VersionTests(SqlMigrateBase):
+
+ _initial_db_version = migrate_repo.DB_INIT_VERSION
+
+ def test_core_initial(self):
+ """Get the version before migrated, it's the initial DB version."""
+ version = migration_helpers.get_db_version()
+ self.assertEqual(migrate_repo.DB_INIT_VERSION, version)
+
+ def test_core_max(self):
+ """When get the version after upgrading, it's the new version."""
+ self.upgrade(self.max_version)
+ version = migration_helpers.get_db_version()
+ self.assertEqual(self.max_version, version)
+
+ def test_extension_not_controlled(self):
+ """When get the version before controlling, raises DbMigrationError."""
+ self.assertRaises(db_exception.DbMigrationError,
+ migration_helpers.get_db_version,
+ extension='federation')
+
+ def test_extension_initial(self):
+ """When get the initial version of an extension, it's 0."""
+ for name, extension in six.iteritems(EXTENSIONS):
+ abs_path = migration_helpers.find_migrate_repo(extension)
+ migration.db_version_control(sql.get_engine(), abs_path)
+ version = migration_helpers.get_db_version(extension=name)
+ self.assertEqual(0, version,
+ 'Migrate version for %s is not 0' % name)
+
+ def test_extension_migrated(self):
+ """When get the version after migrating an extension, it's not 0."""
+ for name, extension in six.iteritems(EXTENSIONS):
+ abs_path = migration_helpers.find_migrate_repo(extension)
+ migration.db_version_control(sql.get_engine(), abs_path)
+ migration.db_sync(sql.get_engine(), abs_path)
+ version = migration_helpers.get_db_version(extension=name)
+ self.assertTrue(
+ version > 0,
+ "Version for %s didn't change after migrated?" % name)
+
+ def test_extension_downgraded(self):
+ """When get the version after downgrading an extension, it is 0."""
+ for name, extension in six.iteritems(EXTENSIONS):
+ abs_path = migration_helpers.find_migrate_repo(extension)
+ migration.db_version_control(sql.get_engine(), abs_path)
+ migration.db_sync(sql.get_engine(), abs_path)
+ version = migration_helpers.get_db_version(extension=name)
+ self.assertTrue(
+ version > 0,
+ "Version for %s didn't change after migrated?" % name)
+ migration.db_sync(sql.get_engine(), abs_path, version=0)
+ version = migration_helpers.get_db_version(extension=name)
+ self.assertEqual(0, version,
+ 'Migrate version for %s is not 0' % name)
+
+ def test_unexpected_extension(self):
+ """The version for an extension that doesn't exist raises ImportError.
+
+ """
+
+ extension_name = uuid.uuid4().hex
+ self.assertRaises(ImportError,
+ migration_helpers.get_db_version,
+ extension=extension_name)
+
+ def test_unversioned_extension(self):
+ """The version for extensions without migrations raise an exception.
+
+ """
+
+ self.assertRaises(exception.MigrationNotProvided,
+ migration_helpers.get_db_version,
+ extension='admin_crud')
+
+ def test_initial_with_extension_version_None(self):
+ """When performing a default migration, also migrate extensions."""
+ migration_helpers.sync_database_to_version(extension=None,
+ version=None)
+ for table in INITIAL_EXTENSION_TABLE_STRUCTURE:
+ self.assertTableColumns(table,
+ INITIAL_EXTENSION_TABLE_STRUCTURE[table])
+
+ def test_initial_with_extension_version_max(self):
+ """When migrating to max version, do not migrate extensions."""
+ migration_helpers.sync_database_to_version(extension=None,
+ version=self.max_version)
+ for table in INITIAL_EXTENSION_TABLE_STRUCTURE:
+ self.assertTableDoesNotExist(table)
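+
+
+# A condensed sketch of the extension version lifecycle exercised by
+# VersionTests above; the helper itself is an illustration and reuses only
+# calls already imported in this module.
+def _extension_version_cycle(name, extension_module):
+    abs_path = migration_helpers.find_migrate_repo(extension_module)
+    migration.db_version_control(sql.get_engine(), abs_path)  # version 0
+    migration.db_sync(sql.get_engine(), abs_path)             # to max version
+    migration.db_sync(sql.get_engine(), abs_path, version=0)  # back to 0
+    return migration_helpers.get_db_version(extension=name)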
diff --git a/keystone-moon/keystone/tests/unit/test_ssl.py b/keystone-moon/keystone/tests/unit/test_ssl.py
new file mode 100644
index 00000000..c5f443b0
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_ssl.py
@@ -0,0 +1,176 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import ssl
+
+from oslo_config import cfg
+
+from keystone.common import environment
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import appserver
+
+
+CONF = cfg.CONF
+
+CERTDIR = tests.dirs.root('examples', 'pki', 'certs')
+KEYDIR = tests.dirs.root('examples', 'pki', 'private')
+CERT = os.path.join(CERTDIR, 'ssl_cert.pem')
+KEY = os.path.join(KEYDIR, 'ssl_key.pem')
+CA = os.path.join(CERTDIR, 'cacert.pem')
+CLIENT = os.path.join(CERTDIR, 'middleware.pem')
+
+
+class SSLTestCase(tests.TestCase):
+ def setUp(self):
+ super(SSLTestCase, self).setUp()
+ # NOTE(jamespage):
+ # Deal with more secure certificate chain verification
+ # introduced in python 2.7.9 under PEP-0476
+ # https://github.com/python/peps/blob/master/pep-0476.txt
+ self.context = None
+ if hasattr(ssl, '_create_unverified_context'):
+ self.context = ssl._create_unverified_context()
+ self.load_backends()
+
+ def get_HTTPSConnection(self, *args):
+ """Simple helper to configure HTTPSConnection objects."""
+ if self.context:
+ return environment.httplib.HTTPSConnection(
+ *args,
+ context=self.context
+ )
+ else:
+ return environment.httplib.HTTPSConnection(*args)
+
+ def test_1way_ssl_ok(self):
+ """Make sure both public and admin API work with 1-way SSL."""
+ paste_conf = self._paste_config('keystone')
+ ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA)
+
+ # Verify Admin
+ with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '127.0.0.1', CONF.eventlet_server.admin_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ # Verify Public
+ with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '127.0.0.1', CONF.eventlet_server.public_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ def test_2way_ssl_ok(self):
+ """Make sure both public and admin API work with 2-way SSL.
+
+ Requires client certificate.
+ """
+ paste_conf = self._paste_config('keystone')
+ ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, cert_required=True)
+
+ # Verify Admin
+ with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '127.0.0.1', CONF.eventlet_server.admin_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ # Verify Public
+ with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '127.0.0.1', CONF.eventlet_server.public_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ def test_1way_ssl_with_ipv6_ok(self):
+ """Make sure both public and admin API work with 1-way ipv6 & SSL."""
+ self.skip_if_no_ipv6()
+
+ paste_conf = self._paste_config('keystone')
+ ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, host="::1")
+
+ # Verify Admin
+ with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '::1', CONF.eventlet_server.admin_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ # Verify Public
+ with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '::1', CONF.eventlet_server.public_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ def test_2way_ssl_with_ipv6_ok(self):
+ """Make sure both public and admin API work with 2-way ipv6 & SSL.
+
+ Requires client certificate.
+ """
+ self.skip_if_no_ipv6()
+
+ paste_conf = self._paste_config('keystone')
+ ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA,
+ cert_required=True, host="::1")
+
+ # Verify Admin
+ with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '::1', CONF.eventlet_server.admin_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ # Verify Public
+ with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '::1', CONF.eventlet_server.public_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(300, resp.status)
+
+ def test_2way_ssl_fail(self):
+ """Expect to fail when client does not present proper certificate."""
+ paste_conf = self._paste_config('keystone')
+ ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, cert_required=True)
+
+ # Verify Admin
+ with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '127.0.0.1', CONF.eventlet_server.admin_port)
+ try:
+ conn.request('GET', '/')
+                self.fail('Admin API should have failed the SSL handshake!')
+ except ssl.SSLError:
+ pass
+
+ # Verify Public
+ with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
+ conn = self.get_HTTPSConnection(
+ '127.0.0.1', CONF.eventlet_server.public_port)
+ try:
+ conn.request('GET', '/')
+                self.fail('Public API should have failed the SSL handshake!')
+ except ssl.SSLError:
+ pass
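+
+
+# A minimal sketch of the PEP 476 workaround applied in setUp() above,
+# without the keystone environment wrapper; host, port, and path are
+# illustrative assumptions.
+def _unverified_https_get(host='127.0.0.1', port=35357, path='/'):
+    from six.moves import http_client
+    if hasattr(ssl, '_create_unverified_context'):
+        conn = http_client.HTTPSConnection(
+            host, port, context=ssl._create_unverified_context())
+    else:
+        conn = http_client.HTTPSConnection(host, port)
+    conn.request('GET', path)
+    return conn.getresponse().status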
diff --git a/keystone-moon/keystone/tests/unit/test_token_bind.py b/keystone-moon/keystone/tests/unit/test_token_bind.py
new file mode 100644
index 00000000..7dc7ccca
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_token_bind.py
@@ -0,0 +1,198 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from keystone.common import wsgi
+from keystone import exception
+from keystone.models import token_model
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_token_provider
+
+
+KERBEROS_BIND = 'USER@REALM'
+ANY = 'any'
+
+
+class BindTest(tests.TestCase):
+ """Test binding tokens to a Principal.
+
+    Even though everything in this file references kerberos, the same
+    concepts will apply to all future binding mechanisms.
+ """
+
+ def setUp(self):
+ super(BindTest, self).setUp()
+ self.TOKEN_BIND_KERB = copy.deepcopy(
+ test_token_provider.SAMPLE_V3_TOKEN)
+ self.TOKEN_BIND_KERB['token']['bind'] = {'kerberos': KERBEROS_BIND}
+ self.TOKEN_BIND_UNKNOWN = copy.deepcopy(
+ test_token_provider.SAMPLE_V3_TOKEN)
+ self.TOKEN_BIND_UNKNOWN['token']['bind'] = {'FOO': 'BAR'}
+ self.TOKEN_BIND_NONE = copy.deepcopy(
+ test_token_provider.SAMPLE_V3_TOKEN)
+
+ self.ALL_TOKENS = [self.TOKEN_BIND_KERB, self.TOKEN_BIND_UNKNOWN,
+ self.TOKEN_BIND_NONE]
+
+ def assert_kerberos_bind(self, tokens, bind_level,
+ use_kerberos=True, success=True):
+ if not isinstance(tokens, dict):
+ for token in tokens:
+ self.assert_kerberos_bind(token, bind_level,
+ use_kerberos=use_kerberos,
+ success=success)
+ elif use_kerberos == ANY:
+ for val in (True, False):
+ self.assert_kerberos_bind(tokens, bind_level,
+ use_kerberos=val, success=success)
+ else:
+ context = {'environment': {}}
+ self.config_fixture.config(group='token',
+ enforce_token_bind=bind_level)
+
+ if use_kerberos:
+ context['environment']['REMOTE_USER'] = KERBEROS_BIND
+ context['environment']['AUTH_TYPE'] = 'Negotiate'
+
+ # NOTE(morganfainberg): This assumes a V3 token.
+ token_ref = token_model.KeystoneToken(
+ token_id=uuid.uuid4().hex,
+ token_data=tokens)
+
+ if not success:
+ self.assertRaises(exception.Unauthorized,
+ wsgi.validate_token_bind,
+ context, token_ref)
+ else:
+ wsgi.validate_token_bind(context, token_ref)
+
+ # DISABLED
+
+ def test_bind_disabled_with_kerb_user(self):
+ self.assert_kerberos_bind(self.ALL_TOKENS,
+ bind_level='disabled',
+ use_kerberos=ANY,
+ success=True)
+
+ # PERMISSIVE
+
+ def test_bind_permissive_with_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='permissive',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_permissive_with_regular_token(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
+ bind_level='permissive',
+ use_kerberos=ANY,
+ success=True)
+
+ def test_bind_permissive_without_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='permissive',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_permissive_with_unknown_bind(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
+ bind_level='permissive',
+ use_kerberos=ANY,
+ success=True)
+
+ # STRICT
+
+ def test_bind_strict_with_regular_token(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
+ bind_level='strict',
+ use_kerberos=ANY,
+ success=True)
+
+ def test_bind_strict_with_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='strict',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_strict_without_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='strict',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_strict_with_unknown_bind(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
+ bind_level='strict',
+ use_kerberos=ANY,
+ success=False)
+
+ # REQUIRED
+
+ def test_bind_required_with_regular_token(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
+ bind_level='required',
+ use_kerberos=ANY,
+ success=False)
+
+ def test_bind_required_with_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='required',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_required_without_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='required',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_required_with_unknown_bind(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
+ bind_level='required',
+ use_kerberos=ANY,
+ success=False)
+
+ # NAMED
+
+ def test_bind_named_with_regular_token(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
+ bind_level='kerberos',
+ use_kerberos=ANY,
+ success=False)
+
+ def test_bind_named_with_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='kerberos',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_named_without_kerb_user(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
+ bind_level='kerberos',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_named_with_unknown_bind(self):
+ self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
+ bind_level='kerberos',
+ use_kerberos=ANY,
+ success=False)
+
+ def test_bind_named_with_unknown_scheme(self):
+ self.assert_kerberos_bind(self.ALL_TOKENS,
+ bind_level='unknown',
+ use_kerberos=ANY,
+ success=False)
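+
+
+# An illustrative (non-authoritative) reduction of the enforcement matrix
+# the tests above encode; wsgi.validate_token_bind is the real
+# implementation, and this helper is an assumption for readability.
+def _bind_allowed(bind_level, token_bind, kerberos_user_present):
+    if bind_level == 'disabled':
+        return True
+    if not token_bind:
+        # Unbound tokens pass except under 'required' or a named scheme.
+        return bind_level in ('permissive', 'strict')
+    for scheme in token_bind:
+        if scheme == 'kerberos':
+            if not kerberos_user_present:
+                return False
+        elif bind_level != 'permissive':
+            # Only 'permissive' ignores schemes it does not understand.
+            return False
+    return bind_level in ('permissive', 'strict', 'required', 'kerberos')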
diff --git a/keystone-moon/keystone/tests/unit/test_token_provider.py b/keystone-moon/keystone/tests/unit/test_token_provider.py
new file mode 100644
index 00000000..dc08664f
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_token_provider.py
@@ -0,0 +1,836 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_config import cfg
+from oslo_utils import timeutils
+
+from keystone.common import dependency
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import database
+from keystone import token
+from keystone.token.providers import pki
+from keystone.token.providers import uuid
+
+
+CONF = cfg.CONF
+
+FUTURE_DELTA = datetime.timedelta(seconds=CONF.token.expiration)
+CURRENT_DATE = timeutils.utcnow()
+
+SAMPLE_V2_TOKEN = {
+ "access": {
+ "trust": {
+ "id": "abc123",
+ "trustee_user_id": "123456",
+ "trustor_user_id": "333333",
+ "impersonation": False
+ },
+ "serviceCatalog": [
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8774/v1.1/01257",
+ "id": "51934fe63a5b4ac0a32664f64eb462c3",
+ "internalURL": "http://localhost:8774/v1.1/01257",
+ "publicURL": "http://localhost:8774/v1.1/01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "nova",
+ "type": "compute"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:9292",
+ "id": "aaa17a539e364297a7845d67c7c7cc4b",
+ "internalURL": "http://localhost:9292",
+ "publicURL": "http://localhost:9292",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "glance",
+ "type": "image"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8776/v1/01257",
+ "id": "077d82df25304abeac2294004441db5a",
+ "internalURL": "http://localhost:8776/v1/01257",
+ "publicURL": "http://localhost:8776/v1/01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "volume",
+ "type": "volume"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8773/services/Admin",
+ "id": "b06997fd08414903ad458836efaa9067",
+ "internalURL": "http://localhost:8773/services/Cloud",
+ "publicURL": "http://localhost:8773/services/Cloud",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "ec2",
+ "type": "ec2"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8080/v1",
+ "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
+ "internalURL": "http://localhost:8080/v1/AUTH_01257",
+ "publicURL": "http://localhost:8080/v1/AUTH_01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "swift",
+ "type": "object-store"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:35357/v2.0",
+ "id": "02850c5d1d094887bdc46e81e1e15dc7",
+ "internalURL": "http://localhost:5000/v2.0",
+ "publicURL": "http://localhost:5000/v2.0",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "keystone",
+ "type": "identity"
+ }
+ ],
+ "token": {
+ "expires": "2013-05-22T00:02:43.941430Z",
+ "id": "ce4fc2d36eea4cc9a36e666ac2f1029a",
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "tenant": {
+ "enabled": True,
+ "id": "01257",
+ "name": "service"
+ }
+ },
+ "user": {
+ "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
+ "name": "nova",
+ "roles": [
+ {
+ "name": "_member_"
+ },
+ {
+ "name": "admin"
+ }
+ ],
+ "roles_links": [],
+ "username": "nova"
+ }
+ }
+}
+
+SAMPLE_V3_TOKEN = {
+ "token": {
+ "catalog": [
+ {
+ "endpoints": [
+ {
+ "id": "02850c5d1d094887bdc46e81e1e15dc7",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:35357/v2.0"
+ },
+ {
+ "id": "446e244b75034a9ab4b0811e82d0b7c8",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:5000/v2.0"
+ },
+ {
+ "id": "47fa3d9f499240abb5dfcf2668f168cd",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:5000/v2.0"
+ }
+ ],
+ "id": "26d7541715a44a4d9adad96f9872b633",
+ "type": "identity",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "aaa17a539e364297a7845d67c7c7cc4b",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ },
+ {
+ "id": "4fa9620e42394cb1974736dce0856c71",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ },
+ {
+ "id": "9673687f9bc441d88dec37942bfd603b",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ }
+ ],
+ "id": "d27a41843f4e4b0e8cf6dac4082deb0d",
+ "type": "image",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8080/v1"
+ },
+ {
+ "id": "43bef154594d4ccb8e49014d20624e1d",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8080/v1/AUTH_01257"
+ },
+ {
+ "id": "e63b5f5d7aa3493690189d0ff843b9b3",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8080/v1/AUTH_01257"
+ }
+ ],
+ "id": "a669e152f1104810a4b6701aade721bb",
+ "type": "object-store",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "51934fe63a5b4ac0a32664f64eb462c3",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ },
+ {
+ "id": "869b535eea0d42e483ae9da0d868ebad",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ },
+ {
+ "id": "93583824c18f4263a2245ca432b132a6",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ }
+ ],
+ "id": "7f32cc2af6c9476e82d75f80e8b3bbb8",
+ "type": "compute",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "b06997fd08414903ad458836efaa9067",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Admin"
+ },
+ {
+ "id": "411f7de7c9a8484c9b46c254fb2676e2",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Cloud"
+ },
+ {
+ "id": "f21c93f3da014785854b4126d0109c49",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Cloud"
+ }
+ ],
+ "id": "b08c9c7d4ef543eba5eeb766f72e5aa1",
+ "type": "ec2",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "077d82df25304abeac2294004441db5a",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ },
+ {
+ "id": "875bf282362c40219665278b4fd11467",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ },
+ {
+ "id": "cd229aa6df0640dc858a8026eb7e640c",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ }
+ ],
+ "id": "5db21b82617f4a95816064736a7bec22",
+ "type": "volume",
+ }
+ ],
+ "expires_at": "2013-05-22T00:02:43.941430Z",
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "methods": [
+ "password"
+ ],
+ "project": {
+ "domain": {
+ "id": "default",
+ "name": "Default"
+ },
+ "id": "01257",
+ "name": "service"
+ },
+ "roles": [
+ {
+ "id": "9fe2ff9ee4384b1894a90878d3e92bab",
+ "name": "_member_"
+ },
+ {
+ "id": "53bff13443bd4450b97f978881d47b18",
+ "name": "admin"
+ }
+ ],
+ "user": {
+ "domain": {
+ "id": "default",
+ "name": "Default"
+ },
+ "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
+ "name": "nova"
+ },
+ "OS-TRUST:trust": {
+ "id": "abc123",
+ "trustee_user_id": "123456",
+ "trustor_user_id": "333333",
+ "impersonation": False
+ }
+ }
+}
+
+SAMPLE_V2_TOKEN_WITH_EMBEDDED_VERSION = {
+ "access": {
+ "trust": {
+ "id": "abc123",
+ "trustee_user_id": "123456",
+ "trustor_user_id": "333333",
+ "impersonation": False
+ },
+ "serviceCatalog": [
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8774/v1.1/01257",
+ "id": "51934fe63a5b4ac0a32664f64eb462c3",
+ "internalURL": "http://localhost:8774/v1.1/01257",
+ "publicURL": "http://localhost:8774/v1.1/01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "nova",
+ "type": "compute"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:9292",
+ "id": "aaa17a539e364297a7845d67c7c7cc4b",
+ "internalURL": "http://localhost:9292",
+ "publicURL": "http://localhost:9292",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "glance",
+ "type": "image"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8776/v1/01257",
+ "id": "077d82df25304abeac2294004441db5a",
+ "internalURL": "http://localhost:8776/v1/01257",
+ "publicURL": "http://localhost:8776/v1/01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "volume",
+ "type": "volume"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8773/services/Admin",
+ "id": "b06997fd08414903ad458836efaa9067",
+ "internalURL": "http://localhost:8773/services/Cloud",
+ "publicURL": "http://localhost:8773/services/Cloud",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "ec2",
+ "type": "ec2"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8080/v1",
+ "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
+ "internalURL": "http://localhost:8080/v1/AUTH_01257",
+ "publicURL": "http://localhost:8080/v1/AUTH_01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "swift",
+ "type": "object-store"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:35357/v2.0",
+ "id": "02850c5d1d094887bdc46e81e1e15dc7",
+ "internalURL": "http://localhost:5000/v2.0",
+ "publicURL": "http://localhost:5000/v2.0",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "keystone",
+ "type": "identity"
+ }
+ ],
+ "token": {
+ "expires": "2013-05-22T00:02:43.941430Z",
+ "id": "ce4fc2d36eea4cc9a36e666ac2f1029a",
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "tenant": {
+ "enabled": True,
+ "id": "01257",
+ "name": "service"
+ }
+ },
+ "user": {
+ "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
+ "name": "nova",
+ "roles": [
+ {
+ "name": "_member_"
+ },
+ {
+ "name": "admin"
+ }
+ ],
+ "roles_links": [],
+ "username": "nova"
+ }
+ },
+ 'token_version': 'v2.0'
+}
+
+SAMPLE_V3_TOKEN_WITH_EMBEDDED_VERSION = {
+ "token": {
+ "catalog": [
+ {
+ "endpoints": [
+ {
+ "id": "02850c5d1d094887bdc46e81e1e15dc7",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:35357/v2.0"
+ },
+ {
+ "id": "446e244b75034a9ab4b0811e82d0b7c8",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:5000/v2.0"
+ },
+ {
+ "id": "47fa3d9f499240abb5dfcf2668f168cd",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:5000/v2.0"
+ }
+ ],
+ "id": "26d7541715a44a4d9adad96f9872b633",
+ "type": "identity",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "aaa17a539e364297a7845d67c7c7cc4b",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ },
+ {
+ "id": "4fa9620e42394cb1974736dce0856c71",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ },
+ {
+ "id": "9673687f9bc441d88dec37942bfd603b",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ }
+ ],
+ "id": "d27a41843f4e4b0e8cf6dac4082deb0d",
+ "type": "image",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8080/v1"
+ },
+ {
+ "id": "43bef154594d4ccb8e49014d20624e1d",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8080/v1/AUTH_01257"
+ },
+ {
+ "id": "e63b5f5d7aa3493690189d0ff843b9b3",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8080/v1/AUTH_01257"
+ }
+ ],
+ "id": "a669e152f1104810a4b6701aade721bb",
+ "type": "object-store",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "51934fe63a5b4ac0a32664f64eb462c3",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ },
+ {
+ "id": "869b535eea0d42e483ae9da0d868ebad",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ },
+ {
+ "id": "93583824c18f4263a2245ca432b132a6",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ }
+ ],
+ "id": "7f32cc2af6c9476e82d75f80e8b3bbb8",
+ "type": "compute",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "b06997fd08414903ad458836efaa9067",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Admin"
+ },
+ {
+ "id": "411f7de7c9a8484c9b46c254fb2676e2",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Cloud"
+ },
+ {
+ "id": "f21c93f3da014785854b4126d0109c49",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Cloud"
+ }
+ ],
+ "id": "b08c9c7d4ef543eba5eeb766f72e5aa1",
+ "type": "ec2",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "077d82df25304abeac2294004441db5a",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ },
+ {
+ "id": "875bf282362c40219665278b4fd11467",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ },
+ {
+ "id": "cd229aa6df0640dc858a8026eb7e640c",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ }
+ ],
+ "id": "5db21b82617f4a95816064736a7bec22",
+ "type": "volume",
+ }
+ ],
+ "expires_at": "2013-05-22T00:02:43.941430Z",
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "methods": [
+ "password"
+ ],
+ "project": {
+ "domain": {
+ "id": "default",
+ "name": "Default"
+ },
+ "id": "01257",
+ "name": "service"
+ },
+ "roles": [
+ {
+ "id": "9fe2ff9ee4384b1894a90878d3e92bab",
+ "name": "_member_"
+ },
+ {
+ "id": "53bff13443bd4450b97f978881d47b18",
+ "name": "admin"
+ }
+ ],
+ "user": {
+ "domain": {
+ "id": "default",
+ "name": "Default"
+ },
+ "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
+ "name": "nova"
+ },
+ "OS-TRUST:trust": {
+ "id": "abc123",
+ "trustee_user_id": "123456",
+ "trustor_user_id": "333333",
+ "impersonation": False
+ }
+ },
+ 'token_version': 'v3.0'
+}
+
+
+def create_v2_token():
+ return {
+ "access": {
+ "token": {
+ "expires": timeutils.isotime(timeutils.utcnow() +
+ FUTURE_DELTA),
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "tenant": {
+ "enabled": True,
+ "id": "01257",
+ "name": "service"
+ }
+ }
+ }
+ }
+
+
+SAMPLE_V2_TOKEN_EXPIRED = {
+ "access": {
+ "token": {
+ "expires": timeutils.isotime(CURRENT_DATE),
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "tenant": {
+ "enabled": True,
+ "id": "01257",
+ "name": "service"
+ }
+ }
+ }
+}
+
+
+def create_v3_token():
+ return {
+ "token": {
+ 'methods': [],
+ "expires_at": timeutils.isotime(timeutils.utcnow() + FUTURE_DELTA),
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ }
+ }
+
+
+SAMPLE_V3_TOKEN_EXPIRED = {
+ "token": {
+ "expires_at": timeutils.isotime(CURRENT_DATE),
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ }
+}
+
+SAMPLE_MALFORMED_TOKEN = {
+ "token": {
+ "bogus": {
+ "no expiration data": None
+ }
+ }
+}
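+
+# The fixtures above are shaped so the provider can tell versions apart by
+# their top-level key: v2.0 payloads are keyed by 'access', v3.0 payloads by
+# 'token'. An illustrative sketch of that dispatch (an assumption about the
+# heuristic, not the provider's actual code):
+#
+#     def sketch_token_version(token_data):
+#         if 'access' in token_data:
+#             return token.provider.V2
+#         if 'token' in token_data:
+#             return token.provider.V3
+#         raise exception.UnsupportedTokenVersionException()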
+
+
+class TestTokenProvider(tests.TestCase):
+ def setUp(self):
+ super(TestTokenProvider, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+
+ def test_get_token_version(self):
+ self.assertEqual(
+ token.provider.V2,
+ self.token_provider_api.get_token_version(SAMPLE_V2_TOKEN))
+ self.assertEqual(
+ token.provider.V2,
+ self.token_provider_api.get_token_version(
+            SAMPLE_V2_TOKEN_WITH_EMBEDDED_VERSION))
+ self.assertEqual(
+ token.provider.V3,
+ self.token_provider_api.get_token_version(SAMPLE_V3_TOKEN))
+ self.assertEqual(
+ token.provider.V3,
+ self.token_provider_api.get_token_version(
+            SAMPLE_V3_TOKEN_WITH_EMBEDDED_VERSION))
+ self.assertRaises(exception.UnsupportedTokenVersionException,
+ self.token_provider_api.get_token_version,
+ 'bogus')
+
+ def test_supported_token_providers(self):
+ # test default config
+
+ dependency.reset()
+ self.assertIsInstance(token.provider.Manager().driver,
+ uuid.Provider)
+
+ dependency.reset()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.uuid.Provider')
+ token.provider.Manager()
+
+ dependency.reset()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider')
+ token.provider.Manager()
+
+ dependency.reset()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pkiz.Provider')
+ token.provider.Manager()
+
+ def test_unsupported_token_provider(self):
+ self.config_fixture.config(group='token',
+ provider='my.package.MyProvider')
+ self.assertRaises(ImportError,
+ token.provider.Manager)
+
+ def test_provider_token_expiration_validation(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._is_valid_token,
+ SAMPLE_V2_TOKEN_EXPIRED)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._is_valid_token,
+ SAMPLE_V3_TOKEN_EXPIRED)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._is_valid_token,
+ SAMPLE_MALFORMED_TOKEN)
+ self.assertIsNone(
+ self.token_provider_api._is_valid_token(create_v2_token()))
+ self.assertIsNone(
+ self.token_provider_api._is_valid_token(create_v3_token()))
+
+
+# NOTE(ayoung): renamed to avoid automatic test detection
+class PKIProviderTests(object):
+
+ def setUp(self):
+ super(PKIProviderTests, self).setUp()
+
+ from keystoneclient.common import cms
+ self.cms = cms
+
+ from keystone.common import environment
+ self.environment = environment
+
+ old_cms_subprocess = cms.subprocess
+ self.addCleanup(setattr, cms, 'subprocess', old_cms_subprocess)
+
+ old_env_subprocess = environment.subprocess
+ self.addCleanup(setattr, environment, 'subprocess', old_env_subprocess)
+
+ self.cms.subprocess = self.target_subprocess
+ self.environment.subprocess = self.target_subprocess
+
+ reload(pki) # force module reload so the imports get re-evaluated
+
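+    # setUp above uses plain attribute monkeypatching with automatic restore:
+    # capture the original attribute, register addCleanup(setattr, obj,
+    # 'name', original) so cleanup puts it back, then install the
+    # replacement. The generic shape (names here are illustrative):
+    #
+    #     original = module.attr
+    #     self.addCleanup(setattr, module, 'attr', original)
+    #     module.attr = replacement
+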
+ def test_get_token_id_error_handling(self):
+ # cause command-line failure
+ self.config_fixture.config(group='signing',
+ keyfile='--please-break-me')
+
+ provider = pki.Provider()
+ token_data = {}
+ self.assertRaises(exception.UnexpectedError,
+ provider._get_token_id,
+ token_data)
+
+
+class TestPKIProviderWithEventlet(PKIProviderTests, tests.TestCase):
+
+ def setUp(self):
+ # force keystoneclient.common.cms to use eventlet's subprocess
+ from eventlet.green import subprocess
+ self.target_subprocess = subprocess
+
+ super(TestPKIProviderWithEventlet, self).setUp()
+
+
+class TestPKIProviderWithStdlib(PKIProviderTests, tests.TestCase):
+
+ def setUp(self):
+ # force keystoneclient.common.cms to use the stdlib subprocess
+ import subprocess
+ self.target_subprocess = subprocess
+
+ super(TestPKIProviderWithStdlib, self).setUp()
diff --git a/keystone-moon/keystone/tests/unit/test_url_middleware.py b/keystone-moon/keystone/tests/unit/test_url_middleware.py
new file mode 100644
index 00000000..1b3872b5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_url_middleware.py
@@ -0,0 +1,53 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import webob
+
+from keystone import middleware
+from keystone.tests import unit as tests
+
+
+class FakeApp(object):
+ """Fakes a WSGI app URL normalized."""
+ def __call__(self, env, start_response):
+ resp = webob.Response()
+ resp.body = 'SUCCESS'
+ return resp(env, start_response)
+
+
+class UrlMiddlewareTest(tests.TestCase):
+ def setUp(self):
+ self.middleware = middleware.NormalizingFilter(FakeApp())
+ self.response_status = None
+ self.response_headers = None
+ super(UrlMiddlewareTest, self).setUp()
+
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+
+ def test_trailing_slash_normalization(self):
+ """Tests /v2.0/tokens and /v2.0/tokens/ normalized URLs match."""
+ req1 = webob.Request.blank('/v2.0/tokens')
+ req2 = webob.Request.blank('/v2.0/tokens/')
+ self.middleware(req1.environ, self.start_fake_response)
+ self.middleware(req2.environ, self.start_fake_response)
+ self.assertEqual(req1.path_url, req2.path_url)
+ self.assertEqual('http://localhost/v2.0/tokens', req1.path_url)
+
+ def test_rewrite_empty_path(self):
+ """Tests empty path is rewritten to root."""
+ req = webob.Request.blank('')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual('http://localhost/', req.path_url)
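+
+    # The observable contract from the two tests above, sketched (this is an
+    # inference from the assertions, not the middleware's implementation):
+    #
+    #     path = environ.get('PATH_INFO') or '/'
+    #     if len(path) > 1 and path.endswith('/'):
+    #         path = path[:-1]            # '/v2.0/tokens/' -> '/v2.0/tokens'
+    #     environ['PATH_INFO'] = path     # '' -> '/'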
diff --git a/keystone-moon/keystone/tests/unit/test_v2.py b/keystone-moon/keystone/tests/unit/test_v2.py
new file mode 100644
index 00000000..8c7c3792
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v2.py
@@ -0,0 +1,1500 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+import time
+import uuid
+
+from keystoneclient.common import cms
+from oslo_config import cfg
+import six
+from testtools import matchers
+
+from keystone.common import extension as keystone_extension
+from keystone.tests.unit import ksfixtures
+from keystone.tests.unit import rest
+
+
+CONF = cfg.CONF
+
+
+class CoreApiTests(object):
+ def assertValidError(self, error):
+ self.assertIsNotNone(error.get('code'))
+ self.assertIsNotNone(error.get('title'))
+ self.assertIsNotNone(error.get('message'))
+
+ def assertValidVersion(self, version):
+ self.assertIsNotNone(version)
+ self.assertIsNotNone(version.get('id'))
+ self.assertIsNotNone(version.get('status'))
+ self.assertIsNotNone(version.get('updated'))
+
+ def assertValidExtension(self, extension):
+ self.assertIsNotNone(extension)
+ self.assertIsNotNone(extension.get('name'))
+ self.assertIsNotNone(extension.get('namespace'))
+ self.assertIsNotNone(extension.get('alias'))
+ self.assertIsNotNone(extension.get('updated'))
+
+ def assertValidExtensionLink(self, link):
+ self.assertIsNotNone(link.get('rel'))
+ self.assertIsNotNone(link.get('type'))
+ self.assertIsNotNone(link.get('href'))
+
+ def assertValidTenant(self, tenant):
+ self.assertIsNotNone(tenant.get('id'))
+ self.assertIsNotNone(tenant.get('name'))
+
+ def assertValidUser(self, user):
+ self.assertIsNotNone(user.get('id'))
+ self.assertIsNotNone(user.get('name'))
+
+    def assertValidRole(self, role):
+        self.assertIsNotNone(role.get('id'))
+        self.assertIsNotNone(role.get('name'))
+
+ def test_public_not_found(self):
+ r = self.public_request(
+ path='/%s' % uuid.uuid4().hex,
+ expected_status=404)
+ self.assertValidErrorResponse(r)
+
+ def test_admin_not_found(self):
+ r = self.admin_request(
+ path='/%s' % uuid.uuid4().hex,
+ expected_status=404)
+ self.assertValidErrorResponse(r)
+
+ def test_public_multiple_choice(self):
+ r = self.public_request(path='/', expected_status=300)
+ self.assertValidMultipleChoiceResponse(r)
+
+ def test_admin_multiple_choice(self):
+ r = self.admin_request(path='/', expected_status=300)
+ self.assertValidMultipleChoiceResponse(r)
+
+ def test_public_version(self):
+ r = self.public_request(path='/v2.0/')
+ self.assertValidVersionResponse(r)
+
+ def test_admin_version(self):
+ r = self.admin_request(path='/v2.0/')
+ self.assertValidVersionResponse(r)
+
+ def test_public_extensions(self):
+ r = self.public_request(path='/v2.0/extensions')
+ self.assertValidExtensionListResponse(
+ r, keystone_extension.PUBLIC_EXTENSIONS)
+
+ def test_admin_extensions(self):
+ r = self.admin_request(path='/v2.0/extensions')
+ self.assertValidExtensionListResponse(
+ r, keystone_extension.ADMIN_EXTENSIONS)
+
+ def test_admin_extensions_404(self):
+ self.admin_request(path='/v2.0/extensions/invalid-extension',
+ expected_status=404)
+
+ def test_public_osksadm_extension_404(self):
+ self.public_request(path='/v2.0/extensions/OS-KSADM',
+ expected_status=404)
+
+ def test_admin_osksadm_extension(self):
+ r = self.admin_request(path='/v2.0/extensions/OS-KSADM')
+ self.assertValidExtensionResponse(
+ r, keystone_extension.ADMIN_EXTENSIONS)
+
+ def test_authenticate(self):
+ r = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ 'tenantId': self.tenant_bar['id'],
+ },
+ },
+ expected_status=200)
+ self.assertValidAuthenticationResponse(r, require_service_catalog=True)
+
+ def test_authenticate_unscoped(self):
+ r = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ },
+ },
+ expected_status=200)
+ self.assertValidAuthenticationResponse(r)
+
+ def test_get_tenants_for_token(self):
+ r = self.public_request(path='/v2.0/tenants',
+ token=self.get_scoped_token())
+ self.assertValidTenantListResponse(r)
+
+ def test_validate_token(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tokens/%(token_id)s' % {
+ 'token_id': token,
+ },
+ token=token)
+ self.assertValidAuthenticationResponse(r)
+
+ def test_invalid_token_404(self):
+ token = self.get_scoped_token()
+ self.admin_request(
+ path='/v2.0/tokens/%(token_id)s' % {
+ 'token_id': 'invalid',
+ },
+ token=token,
+ expected_status=404)
+
+ def test_validate_token_service_role(self):
+ self.md_foobar = self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_service['id'],
+ self.role_service['id'])
+
+ token = self.get_scoped_token(tenant_id='service')
+ r = self.admin_request(
+ path='/v2.0/tokens/%s' % token,
+ token=token)
+ self.assertValidAuthenticationResponse(r)
+
+ def test_remove_role_revokes_token(self):
+ self.md_foobar = self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_service['id'],
+ self.role_service['id'])
+
+ token = self.get_scoped_token(tenant_id='service')
+ r = self.admin_request(
+ path='/v2.0/tokens/%s' % token,
+ token=token)
+ self.assertValidAuthenticationResponse(r)
+
+ self.assignment_api.remove_role_from_user_and_project(
+ self.user_foo['id'],
+ self.tenant_service['id'],
+ self.role_service['id'])
+
+ r = self.admin_request(
+ path='/v2.0/tokens/%s' % token,
+ token=token,
+ expected_status=401)
+
+ def test_validate_token_belongs_to(self):
+ token = self.get_scoped_token()
+ path = ('/v2.0/tokens/%s?belongsTo=%s' % (token,
+ self.tenant_bar['id']))
+ r = self.admin_request(path=path, token=token)
+ self.assertValidAuthenticationResponse(r, require_service_catalog=True)
+
+ def test_validate_token_no_belongs_to_still_returns_catalog(self):
+ token = self.get_scoped_token()
+ path = ('/v2.0/tokens/%s' % token)
+ r = self.admin_request(path=path, token=token)
+ self.assertValidAuthenticationResponse(r, require_service_catalog=True)
+
+ def test_validate_token_head(self):
+ """The same call as above, except using HEAD.
+
+ There's no response to validate here, but this is included for the
+ sake of completely covering the core API.
+
+ """
+ token = self.get_scoped_token()
+ self.admin_request(
+ method='HEAD',
+ path='/v2.0/tokens/%(token_id)s' % {
+ 'token_id': token,
+ },
+ token=token,
+ expected_status=200)
+
+ def test_endpoints(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tokens/%(token_id)s/endpoints' % {
+ 'token_id': token,
+ },
+ token=token)
+ self.assertValidEndpointListResponse(r)
+
+ def test_get_tenant(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tenants/%(tenant_id)s' % {
+ 'tenant_id': self.tenant_bar['id'],
+ },
+ token=token)
+ self.assertValidTenantResponse(r)
+
+ def test_get_tenant_by_name(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tenants?name=%(tenant_name)s' % {
+ 'tenant_name': self.tenant_bar['name'],
+ },
+ token=token)
+ self.assertValidTenantResponse(r)
+
+ def test_get_user_roles_with_tenant(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % {
+ 'tenant_id': self.tenant_bar['id'],
+ 'user_id': self.user_foo['id'],
+ },
+ token=token)
+ self.assertValidRoleListResponse(r)
+
+ def test_get_user(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': self.user_foo['id'],
+ },
+ token=token)
+ self.assertValidUserResponse(r)
+
+ def test_get_user_by_name(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/users?name=%(user_name)s' % {
+ 'user_name': self.user_foo['name'],
+ },
+ token=token)
+ self.assertValidUserResponse(r)
+
+ def test_create_update_user_invalid_enabled_type(self):
+ # Enforce usage of boolean for 'enabled' field
+ token = self.get_scoped_token()
+
+ # Test CREATE request
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'enabled': "False",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ # In JSON, 0|1 are not booleans
+ 'enabled': 0,
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ # Test UPDATE request
+ path = '/v2.0/users/%(user_id)s' % {
+ 'user_id': self.user_foo['id'],
+ }
+
+ r = self.admin_request(
+ method='PUT',
+ path=path,
+ body={
+ 'user': {
+ 'enabled': "False",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(
+ method='PUT',
+ path=path,
+ body={
+ 'user': {
+ # In JSON, 0|1 are not booleans
+ 'enabled': 1,
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ def test_create_update_user_valid_enabled_type(self):
+ # Enforce usage of boolean for 'enabled' field
+ token = self.get_scoped_token()
+
+ # Test CREATE request
+ self.admin_request(method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'enabled': False,
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ def test_error_response(self):
+ """This triggers assertValidErrorResponse by convention."""
+ self.public_request(path='/v2.0/tenants', expected_status=401)
+
+ def test_invalid_parameter_error_response(self):
+ token = self.get_scoped_token()
+ bad_body = {
+ 'OS-KSADM:service%s' % uuid.uuid4().hex: {
+ 'name': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ },
+ }
+ res = self.admin_request(method='POST',
+ path='/v2.0/OS-KSADM/services',
+ body=bad_body,
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(res)
+ res = self.admin_request(method='POST',
+ path='/v2.0/users',
+ body=bad_body,
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(res)
+
+ def _get_user_id(self, r):
+ """Helper method to return user ID from a response.
+
+ This needs to be overridden by child classes
+ based on their content type.
+
+ """
+ raise NotImplementedError()
+
+ def _get_role_id(self, r):
+ """Helper method to return a role ID from a response.
+
+ This needs to be overridden by child classes
+ based on their content type.
+
+ """
+ raise NotImplementedError()
+
+ def _get_role_name(self, r):
+ """Helper method to return role NAME from a response.
+
+ This needs to be overridden by child classes
+ based on their content type.
+
+ """
+ raise NotImplementedError()
+
+ def _get_project_id(self, r):
+ """Helper method to return project ID from a response.
+
+ This needs to be overridden by child classes
+ based on their content type.
+
+ """
+ raise NotImplementedError()
+
+ def assertNoRoles(self, r):
+ """Helper method to assert No Roles
+
+ This needs to be overridden by child classes
+ based on their content type.
+
+ """
+ raise NotImplementedError()
+
+ def test_update_user_tenant(self):
+ token = self.get_scoped_token()
+
+ # Create a new user
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'tenantId': self.tenant_bar['id'],
+ 'enabled': True,
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ user_id = self._get_user_id(r.result)
+
+ # Check if member_role is in tenant_bar
+ r = self.admin_request(
+ path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.tenant_bar['id'],
+ 'user_id': user_id
+ },
+ token=token,
+ expected_status=200)
+ self.assertEqual(CONF.member_role_name, self._get_role_name(r.result))
+
+ # Create a new tenant
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/tenants',
+ body={
+ 'tenant': {
+ 'name': 'test_update_user',
+ 'description': 'A description ...',
+ 'enabled': True,
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ project_id = self._get_project_id(r.result)
+
+ # Update user's tenant
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': user_id,
+ },
+ body={
+ 'user': {
+ 'tenantId': project_id,
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ # 'member_role' should be in new_tenant
+ r = self.admin_request(
+ path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': project_id,
+ 'user_id': user_id
+ },
+ token=token,
+ expected_status=200)
+ self.assertEqual('_member_', self._get_role_name(r.result))
+
+ # 'member_role' should not be in tenant_bar any more
+ r = self.admin_request(
+ path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.tenant_bar['id'],
+ 'user_id': user_id
+ },
+ token=token,
+ expected_status=200)
+ self.assertNoRoles(r.result)
+
+ def test_update_user_with_invalid_tenant(self):
+ token = self.get_scoped_token()
+
+ # Create a new user
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': 'test_invalid_tenant',
+ 'password': uuid.uuid4().hex,
+ 'tenantId': self.tenant_bar['id'],
+ 'enabled': True,
+ },
+ },
+ token=token,
+ expected_status=200)
+ user_id = self._get_user_id(r.result)
+
+ # Update user with an invalid tenant
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': user_id,
+ },
+ body={
+ 'user': {
+ 'tenantId': 'abcde12345heha',
+ },
+ },
+ token=token,
+ expected_status=404)
+
+ def test_update_user_with_invalid_tenant_no_prev_tenant(self):
+ token = self.get_scoped_token()
+
+ # Create a new user
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': 'test_invalid_tenant',
+ 'password': uuid.uuid4().hex,
+ 'enabled': True,
+ },
+ },
+ token=token,
+ expected_status=200)
+ user_id = self._get_user_id(r.result)
+
+ # Update user with an invalid tenant
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': user_id,
+ },
+ body={
+ 'user': {
+ 'tenantId': 'abcde12345heha',
+ },
+ },
+ token=token,
+ expected_status=404)
+
+ def test_update_user_with_old_tenant(self):
+ token = self.get_scoped_token()
+
+ # Create a new user
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'tenantId': self.tenant_bar['id'],
+ 'enabled': True,
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ user_id = self._get_user_id(r.result)
+
+ # Check if member_role is in tenant_bar
+ r = self.admin_request(
+ path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.tenant_bar['id'],
+ 'user_id': user_id
+ },
+ token=token,
+ expected_status=200)
+ self.assertEqual(CONF.member_role_name, self._get_role_name(r.result))
+
+ # Update user's tenant with old tenant id
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': user_id,
+ },
+ body={
+ 'user': {
+ 'tenantId': self.tenant_bar['id'],
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ # 'member_role' should still be in tenant_bar
+ r = self.admin_request(
+ path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.tenant_bar['id'],
+ 'user_id': user_id
+ },
+ token=token,
+ expected_status=200)
+ self.assertEqual('_member_', self._get_role_name(r.result))
+
+ def test_authenticating_a_user_with_no_password(self):
+ token = self.get_scoped_token()
+
+ username = uuid.uuid4().hex
+
+ # create the user
+ self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': username,
+ 'enabled': True,
+ },
+ },
+ token=token)
+
+ # fail to authenticate
+ r = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': username,
+ 'password': 'password',
+ },
+ },
+ },
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ def test_www_authenticate_header(self):
+ r = self.public_request(
+ path='/v2.0/tenants',
+ expected_status=401)
+ self.assertEqual('Keystone uri="http://localhost"',
+ r.headers.get('WWW-Authenticate'))
+
+ def test_www_authenticate_header_host(self):
+ test_url = 'http://%s:4187' % uuid.uuid4().hex
+ self.config_fixture.config(public_endpoint=test_url)
+ r = self.public_request(
+ path='/v2.0/tenants',
+ expected_status=401)
+ self.assertEqual('Keystone uri="%s"' % test_url,
+ r.headers.get('WWW-Authenticate'))
+
+
+class LegacyV2UsernameTests(object):
+ """Tests to show the broken username behavior in V2.
+
+    The V2 API is documented to use `username` instead of `name`. The
+    API instead forced the use of `name` and let `username` fall into
+    the `extra` field.
+
+ These tests ensure this behavior works so fixes to `username`/`name`
+ will be backward compatible.
+ """
+
+ def create_user(self, **user_attrs):
+ """Creates a users and returns the response object.
+
+ :param user_attrs: attributes added to the request body (optional)
+ """
+ token = self.get_scoped_token()
+ body = {
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'enabled': True,
+ },
+ }
+ body['user'].update(user_attrs)
+
+ return self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ token=token,
+ body=body,
+ expected_status=200)
+
+ def test_create_with_extra_username(self):
+ """The response for creating a user will contain the extra fields."""
+ fake_username = uuid.uuid4().hex
+ r = self.create_user(username=fake_username)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(fake_username, user.get('username'))
+
+ def test_get_returns_username_from_extra(self):
+ """The response for getting a user will contain the extra fields."""
+ token = self.get_scoped_token()
+
+ fake_username = uuid.uuid4().hex
+ r = self.create_user(username=fake_username)
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ r = self.admin_request(path='/v2.0/users/%s' % id_, token=token)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(fake_username, user.get('username'))
+
+ def test_update_returns_new_username_when_adding_username(self):
+ """The response for updating a user will contain the extra fields.
+
+ This is specifically testing for updating a username when a value
+ was not previously set.
+ """
+ token = self.get_scoped_token()
+
+ r = self.create_user()
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ name = self.get_user_attribute_from_response(r, 'name')
+ enabled = self.get_user_attribute_from_response(r, 'enabled')
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%s' % id_,
+ token=token,
+ body={
+ 'user': {
+ 'name': name,
+ 'username': 'new_username',
+ 'enabled': enabled,
+ },
+ },
+ expected_status=200)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual('new_username', user.get('username'))
+
+ def test_update_returns_new_username_when_updating_username(self):
+ """The response for updating a user will contain the extra fields.
+
+ This tests updating a username that was previously set.
+ """
+ token = self.get_scoped_token()
+
+ r = self.create_user(username='original_username')
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ name = self.get_user_attribute_from_response(r, 'name')
+ enabled = self.get_user_attribute_from_response(r, 'enabled')
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%s' % id_,
+ token=token,
+ body={
+ 'user': {
+ 'name': name,
+ 'username': 'new_username',
+ 'enabled': enabled,
+ },
+ },
+ expected_status=200)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual('new_username', user.get('username'))
+
+ def test_username_is_always_returned_create(self):
+ """Username is set as the value of name if no username is provided.
+
+ This matches the v2.0 spec where we really should be using username
+ and not name.
+ """
+ r = self.create_user()
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(user.get('name'), user.get('username'))
+
+ def test_username_is_always_returned_get(self):
+ """Username is set as the value of name if no username is provided.
+
+ This matches the v2.0 spec where we really should be using username
+ and not name.
+ """
+ token = self.get_scoped_token()
+
+ r = self.create_user()
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ r = self.admin_request(path='/v2.0/users/%s' % id_, token=token)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(user.get('name'), user.get('username'))
+
+ def test_username_is_always_returned_get_by_name(self):
+ """Username is set as the value of name if no username is provided.
+
+ This matches the v2.0 spec where we really should be using username
+ and not name.
+ """
+ token = self.get_scoped_token()
+
+ r = self.create_user()
+
+ name = self.get_user_attribute_from_response(r, 'name')
+ r = self.admin_request(path='/v2.0/users?name=%s' % name, token=token)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(user.get('name'), user.get('username'))
+
+ def test_username_is_always_returned_update_no_username_provided(self):
+ """Username is set as the value of name if no username is provided.
+
+ This matches the v2.0 spec where we really should be using username
+ and not name.
+ """
+ token = self.get_scoped_token()
+
+ r = self.create_user()
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ name = self.get_user_attribute_from_response(r, 'name')
+ enabled = self.get_user_attribute_from_response(r, 'enabled')
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%s' % id_,
+ token=token,
+ body={
+ 'user': {
+ 'name': name,
+ 'enabled': enabled,
+ },
+ },
+ expected_status=200)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(user.get('name'), user.get('username'))
+
+ def test_updated_username_is_returned(self):
+ """Username is set as the value of name if no username is provided.
+
+ This matches the v2.0 spec where we really should be using username
+ and not name.
+ """
+ token = self.get_scoped_token()
+
+ r = self.create_user()
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ name = self.get_user_attribute_from_response(r, 'name')
+ enabled = self.get_user_attribute_from_response(r, 'enabled')
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%s' % id_,
+ token=token,
+ body={
+ 'user': {
+ 'name': name,
+ 'enabled': enabled,
+ },
+ },
+ expected_status=200)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(user.get('name'), user.get('username'))
+
+ def test_username_can_be_used_instead_of_name_create(self):
+ token = self.get_scoped_token()
+
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ token=token,
+ body={
+ 'user': {
+ 'username': uuid.uuid4().hex,
+ 'enabled': True,
+ },
+ },
+ expected_status=200)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(user.get('name'), user.get('username'))
+
+ def test_username_can_be_used_instead_of_name_update(self):
+ token = self.get_scoped_token()
+
+ r = self.create_user()
+
+ id_ = self.get_user_attribute_from_response(r, 'id')
+ new_username = uuid.uuid4().hex
+ enabled = self.get_user_attribute_from_response(r, 'enabled')
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%s' % id_,
+ token=token,
+ body={
+ 'user': {
+ 'username': new_username,
+ 'enabled': enabled,
+ },
+ },
+ expected_status=200)
+
+ self.assertValidUserResponse(r)
+
+ user = self.get_user_from_response(r)
+ self.assertEqual(new_username, user.get('name'))
+ self.assertEqual(user.get('name'), user.get('username'))
+
+
+class RestfulTestCase(rest.RestfulTestCase):
+
+ def setUp(self):
+ super(RestfulTestCase, self).setUp()
+
+        # TODO(termie): add an admin user to the fixtures and use that user;
+        # override the fixtures for now
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+
+class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
+ def _get_user_id(self, r):
+ return r['user']['id']
+
+ def _get_role_name(self, r):
+ return r['roles'][0]['name']
+
+ def _get_role_id(self, r):
+ return r['roles'][0]['id']
+
+ def _get_project_id(self, r):
+ return r['tenant']['id']
+
+ def _get_token_id(self, r):
+ return r.result['access']['token']['id']
+
+ def assertNoRoles(self, r):
+ self.assertEqual([], r['roles'])
+
+ def assertValidErrorResponse(self, r):
+ self.assertIsNotNone(r.result.get('error'))
+ self.assertValidError(r.result['error'])
+ self.assertEqual(r.result['error']['code'], r.status_code)
+
+ def assertValidExtension(self, extension, expected):
+ super(V2TestCase, self).assertValidExtension(extension)
+ descriptions = [ext['description'] for ext in six.itervalues(expected)]
+ description = extension.get('description')
+ self.assertIsNotNone(description)
+ self.assertIn(description, descriptions)
+ self.assertIsNotNone(extension.get('links'))
+ self.assertNotEmpty(extension.get('links'))
+ for link in extension.get('links'):
+ self.assertValidExtensionLink(link)
+
+ def assertValidExtensionListResponse(self, r, expected):
+ self.assertIsNotNone(r.result.get('extensions'))
+ self.assertIsNotNone(r.result['extensions'].get('values'))
+ self.assertNotEmpty(r.result['extensions'].get('values'))
+ for extension in r.result['extensions']['values']:
+ self.assertValidExtension(extension, expected)
+
+ def assertValidExtensionResponse(self, r, expected):
+ self.assertValidExtension(r.result.get('extension'), expected)
+
+ def assertValidUser(self, user):
+ super(V2TestCase, self).assertValidUser(user)
+ self.assertNotIn('default_project_id', user)
+ if 'tenantId' in user:
+            # NOTE(morganfainberg): tenantId should never be "None"; it gets
+            # filtered out of the object if present. This is a belt-and-
+            # suspenders check to avoid unintended regressions.
+ self.assertIsNotNone(user.get('tenantId'))
+
+ def assertValidAuthenticationResponse(self, r,
+ require_service_catalog=False):
+ self.assertIsNotNone(r.result.get('access'))
+ self.assertIsNotNone(r.result['access'].get('token'))
+ self.assertIsNotNone(r.result['access'].get('user'))
+
+ # validate token
+ self.assertIsNotNone(r.result['access']['token'].get('id'))
+ self.assertIsNotNone(r.result['access']['token'].get('expires'))
+ tenant = r.result['access']['token'].get('tenant')
+ if tenant is not None:
+ # validate tenant
+ self.assertIsNotNone(tenant.get('id'))
+ self.assertIsNotNone(tenant.get('name'))
+
+ # validate user
+ self.assertIsNotNone(r.result['access']['user'].get('id'))
+ self.assertIsNotNone(r.result['access']['user'].get('name'))
+
+ if require_service_catalog:
+ # roles are only provided with a service catalog
+ roles = r.result['access']['user'].get('roles')
+ self.assertNotEmpty(roles)
+ for role in roles:
+ self.assertIsNotNone(role.get('name'))
+
+ serviceCatalog = r.result['access'].get('serviceCatalog')
+ # validate service catalog
+ if require_service_catalog:
+ self.assertIsNotNone(serviceCatalog)
+ if serviceCatalog is not None:
+ self.assertIsInstance(serviceCatalog, list)
+ if require_service_catalog:
+ self.assertNotEmpty(serviceCatalog)
+ for service in r.result['access']['serviceCatalog']:
+ # validate service
+ self.assertIsNotNone(service.get('name'))
+ self.assertIsNotNone(service.get('type'))
+
+ # services contain at least one endpoint
+ self.assertIsNotNone(service.get('endpoints'))
+ self.assertNotEmpty(service['endpoints'])
+ for endpoint in service['endpoints']:
+ # validate service endpoint
+ self.assertIsNotNone(endpoint.get('publicURL'))
+
+ def assertValidTenantListResponse(self, r):
+ self.assertIsNotNone(r.result.get('tenants'))
+ self.assertNotEmpty(r.result['tenants'])
+ for tenant in r.result['tenants']:
+ self.assertValidTenant(tenant)
+ self.assertIsNotNone(tenant.get('enabled'))
+ self.assertIn(tenant.get('enabled'), [True, False])
+
+ def assertValidUserResponse(self, r):
+ self.assertIsNotNone(r.result.get('user'))
+ self.assertValidUser(r.result['user'])
+
+ def assertValidTenantResponse(self, r):
+ self.assertIsNotNone(r.result.get('tenant'))
+ self.assertValidTenant(r.result['tenant'])
+
+ def assertValidRoleListResponse(self, r):
+ self.assertIsNotNone(r.result.get('roles'))
+ self.assertNotEmpty(r.result['roles'])
+ for role in r.result['roles']:
+ self.assertValidRole(role)
+
+ def assertValidVersion(self, version):
+ super(V2TestCase, self).assertValidVersion(version)
+
+ self.assertIsNotNone(version.get('links'))
+ self.assertNotEmpty(version.get('links'))
+ for link in version.get('links'):
+ self.assertIsNotNone(link.get('rel'))
+ self.assertIsNotNone(link.get('href'))
+
+ self.assertIsNotNone(version.get('media-types'))
+ self.assertNotEmpty(version.get('media-types'))
+ for media in version.get('media-types'):
+ self.assertIsNotNone(media.get('base'))
+ self.assertIsNotNone(media.get('type'))
+
+ def assertValidMultipleChoiceResponse(self, r):
+ self.assertIsNotNone(r.result.get('versions'))
+ self.assertIsNotNone(r.result['versions'].get('values'))
+ self.assertNotEmpty(r.result['versions']['values'])
+ for version in r.result['versions']['values']:
+ self.assertValidVersion(version)
+
+ def assertValidVersionResponse(self, r):
+ self.assertValidVersion(r.result.get('version'))
+
+ def assertValidEndpointListResponse(self, r):
+ self.assertIsNotNone(r.result.get('endpoints'))
+ self.assertNotEmpty(r.result['endpoints'])
+ for endpoint in r.result['endpoints']:
+ self.assertIsNotNone(endpoint.get('id'))
+ self.assertIsNotNone(endpoint.get('name'))
+ self.assertIsNotNone(endpoint.get('type'))
+ self.assertIsNotNone(endpoint.get('publicURL'))
+ self.assertIsNotNone(endpoint.get('internalURL'))
+ self.assertIsNotNone(endpoint.get('adminURL'))
+
+ def get_user_from_response(self, r):
+ return r.result.get('user')
+
+ def get_user_attribute_from_response(self, r, attribute_name):
+ return r.result['user'][attribute_name]
+
+ def test_service_crud_requires_auth(self):
+ """Service CRUD should 401 without an X-Auth-Token (bug 1006822)."""
+ # values here don't matter because we should 401 before they're checked
+ service_path = '/v2.0/OS-KSADM/services/%s' % uuid.uuid4().hex
+ service_body = {
+ 'OS-KSADM:service': {
+ 'name': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ },
+ }
+
+ r = self.admin_request(method='GET',
+ path='/v2.0/OS-KSADM/services',
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(method='POST',
+ path='/v2.0/OS-KSADM/services',
+ body=service_body,
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(method='GET',
+ path=service_path,
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(method='DELETE',
+ path=service_path,
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ def test_user_role_list_requires_auth(self):
+ """User role list should 401 without an X-Auth-Token (bug 1006815)."""
+ # values here don't matter because we should 401 before they're checked
+ path = '/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % {
+ 'tenant_id': uuid.uuid4().hex,
+ 'user_id': uuid.uuid4().hex,
+ }
+
+ r = self.admin_request(path=path, expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ def test_fetch_revocation_list_nonadmin_fails(self):
+ self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/revoked',
+ expected_status=401)
+
+ def test_fetch_revocation_list_admin_200(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/revoked',
+ token=token,
+ expected_status=200)
+ self.assertValidRevocationListResponse(r)
+
+ def assertValidRevocationListResponse(self, response):
+ self.assertIsNotNone(response.result['signed'])
+
+ def _fetch_parse_revocation_list(self):
+
+ token1 = self.get_scoped_token()
+
+        # TODO(morganfainberg): Because this makes a RESTful call to the
+        # app, a change to UTCNOW via mock.patch will not affect the returned
+        # token. The only surefire way to ensure there is no transient bug
+        # based upon when the second token is issued is with a sleep. The
+        # issue stems from the limited resolution (no microseconds) on the
+        # expiry time of tokens and the way revocation events use token
+        # expiry to revoke individual tokens. This is a stop-gap until the
+        # associated issues with resolution on expiration and revocation
+        # events are resolved.
+ time.sleep(1)
+
+ token2 = self.get_scoped_token()
+
+ self.admin_request(method='DELETE',
+ path='/v2.0/tokens/%s' % token2,
+ token=token1)
+
+ r = self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/revoked',
+ token=token1,
+ expected_status=200)
+ signed_text = r.result['signed']
+
+ data_json = cms.cms_verify(signed_text, CONF.signing.certfile,
+ CONF.signing.ca_certs)
+
+ data = json.loads(data_json)
+
+ return (data, token2)
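+
+    # A client of GET /v2.0/tokens/revoked would repeat the dance above:
+    # fetch the CMS blob, verify it against the signing certificate and CA,
+    # then JSON-decode the payload and compare hashed token IDs. A sketch
+    # under the same assumptions as this helper:
+    #
+    #     signed = r.result['signed']
+    #     data = json.loads(cms.cms_verify(signed, CONF.signing.certfile,
+    #                                      CONF.signing.ca_certs))
+    #     revoked_ids = [entry['id'] for entry in data['revoked']]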
+
+ def test_fetch_revocation_list_md5(self):
+ """If the server is configured for md5, then the revocation list has
+ tokens hashed with MD5.
+ """
+
+ # The default hash algorithm is md5.
+ hash_algorithm = 'md5'
+
+ (data, token) = self._fetch_parse_revocation_list()
+ token_hash = cms.cms_hash_token(token, mode=hash_algorithm)
+ self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id']))
+
+ def test_fetch_revocation_list_sha256(self):
+ """If the server is configured for sha256, then the revocation list has
+ tokens hashed with SHA256
+ """
+
+ hash_algorithm = 'sha256'
+ self.config_fixture.config(group='token',
+ hash_algorithm=hash_algorithm)
+
+ (data, token) = self._fetch_parse_revocation_list()
+ token_hash = cms.cms_hash_token(token, mode=hash_algorithm)
+ self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id']))
+
+ def test_create_update_user_invalid_enabled_type(self):
+ # Enforce usage of boolean for 'enabled' field
+ token = self.get_scoped_token()
+
+ # Test CREATE request
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ # In JSON, "true|false" are not boolean
+ 'enabled': "true",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ # Test UPDATE request
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': self.user_foo['id'],
+ },
+ body={
+ 'user': {
+ # In JSON, "true|false" are not boolean
+ 'enabled': "true",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ def test_authenticating_a_user_with_an_OSKSADM_password(self):
+ token = self.get_scoped_token()
+
+ username = uuid.uuid4().hex
+ password = uuid.uuid4().hex
+
+ # create the user
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': username,
+ 'OS-KSADM:password': password,
+ 'enabled': True,
+ },
+ },
+ token=token)
+
+ # successfully authenticate
+ self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': username,
+ 'password': password,
+ },
+ },
+ },
+ expected_status=200)
+
+ # ensure password doesn't leak
+ user_id = r.result['user']['id']
+ r = self.admin_request(
+ method='GET',
+ path='/v2.0/users/%s' % user_id,
+ token=token,
+ expected_status=200)
+ self.assertNotIn('OS-KSADM:password', r.result['user'])
+
+ def test_updating_a_user_with_an_OSKSADM_password(self):
+ token = self.get_scoped_token()
+
+ user_id = self.user_foo['id']
+ password = uuid.uuid4().hex
+
+ # update the user
+ self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%s/OS-KSADM/password' % user_id,
+ body={
+ 'user': {
+ 'password': password,
+ },
+ },
+ token=token,
+ expected_status=200)
+
+ # successfully authenticate
+ self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': password,
+ },
+ },
+ },
+ expected_status=200)
+
+
+class RevokeApiTestCase(V2TestCase):
+ def config_overrides(self):
+ super(RevokeApiTestCase, self).config_overrides()
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider',
+ revoke_by_id=False)
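+        # NOTE: with revoke_by_id disabled, revocations are tracked as
+        # events by the revoke extension rather than via an enumerable
+        # revocation list, which is why the revocation_list tests below
+        # are skipped.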
+
+ def test_fetch_revocation_list_admin_200(self):
+ self.skipTest('Revoke API disables revocation_list.')
+
+ def test_fetch_revocation_list_md5(self):
+ self.skipTest('Revoke API disables revocation_list.')
+
+ def test_fetch_revocation_list_sha256(self):
+ self.skipTest('Revoke API disables revocation_list.')
+
+
+class TestFernetTokenProviderV2(RestfulTestCase):
+
+ def setUp(self):
+ super(TestFernetTokenProviderV2, self).setUp()
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
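+        # NOTE: the Fernet provider needs a key repository on disk to
+        # encrypt and decrypt token payloads; the fixture above creates
+        # one in a temporary directory and points the configuration at it.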
+
+ # Used by RestfulTestCase
+ def _get_token_id(self, r):
+ return r.result['access']['token']['id']
+
+ def new_project_ref(self):
+ return {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'domain_id': 'default',
+ 'enabled': True}
+
+ def config_overrides(self):
+ super(TestFernetTokenProviderV2, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.fernet.Provider')
+
+ def test_authenticate_unscoped_token(self):
+ unscoped_token = self.get_unscoped_token()
+        # Fernet token must be shorter than 255 characters per
+        # usability requirements
+ self.assertLess(len(unscoped_token), 255)
+
+ def test_validate_unscoped_token(self):
+ # Grab an admin token to validate with
+ project_ref = self.new_project_ref()
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
+ project_ref['id'],
+ self.role_admin['id'])
+ admin_token = self.get_scoped_token(tenant_id=project_ref['id'])
+ unscoped_token = self.get_unscoped_token()
+ path = ('/v2.0/tokens/%s' % unscoped_token)
+ self.admin_request(
+ method='GET',
+ path=path,
+ token=admin_token,
+ expected_status=200)
+
+ def test_authenticate_scoped_token(self):
+ project_ref = self.new_project_ref()
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], project_ref['id'], self.role_service['id'])
+ token = self.get_scoped_token(tenant_id=project_ref['id'])
+        # Fernet token must be shorter than 255 characters per
+        # usability requirements
+ self.assertLess(len(token), 255)
+
+ def test_validate_scoped_token(self):
+ project_ref = self.new_project_ref()
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
+ project_ref['id'],
+ self.role_admin['id'])
+ project2_ref = self.new_project_ref()
+ self.resource_api.create_project(project2_ref['id'], project2_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], project2_ref['id'], self.role_member['id'])
+ admin_token = self.get_scoped_token(tenant_id=project_ref['id'])
+ member_token = self.get_scoped_token(tenant_id=project2_ref['id'])
+ path = ('/v2.0/tokens/%s?belongsTo=%s' % (member_token,
+ project2_ref['id']))
+ # Validate token belongs to project
+ self.admin_request(
+ method='GET',
+ path=path,
+ token=admin_token,
+ expected_status=200)
+
+ def test_token_authentication_and_validation(self):
+ """Test token authentication for Fernet token provider.
+
+        Verify that token authentication returns a valid response code
+        and that the resulting token belongs to the project.
+ """
+ project_ref = self.new_project_ref()
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ unscoped_token = self.get_unscoped_token()
+ self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
+ project_ref['id'],
+ self.role_admin['id'])
+ r = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'tenantName': project_ref['name'],
+ 'token': {
+ 'id': unscoped_token.encode('ascii')
+ }
+ }
+ },
+ expected_status=200)
+
+ token_id = self._get_token_id(r)
+ path = ('/v2.0/tokens/%s?belongsTo=%s' % (token_id, project_ref['id']))
+ # Validate token belongs to project
+ self.admin_request(
+ method='GET',
+ path=path,
+ token=CONF.admin_token,
+ expected_status=200)
diff --git a/keystone-moon/keystone/tests/unit/test_v2_controller.py b/keystone-moon/keystone/tests/unit/test_v2_controller.py
new file mode 100644
index 00000000..6c1edd0a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v2_controller.py
@@ -0,0 +1,95 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import uuid
+
+from keystone.assignment import controllers as assignment_controllers
+from keystone.resource import controllers as resource_controllers
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+
+
+_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}}
+
+
+class TenantTestCase(tests.TestCase):
+ """Tests for the V2 Tenant controller.
+
+ These tests exercise :class:`keystone.assignment.controllers.Tenant`.
+
+ """
+ def setUp(self):
+ super(TenantTestCase, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.tenant_controller = resource_controllers.Tenant()
+ self.assignment_tenant_controller = (
+ assignment_controllers.TenantAssignment())
+ self.assignment_role_controller = (
+ assignment_controllers.RoleAssignmentV2())
+
+ def test_get_project_users_no_user(self):
+ """get_project_users when user doesn't exist.
+
+ When a user that's not known to `identity` has a role on a project,
+ then `get_project_users` just skips that user.
+
+ """
+ project_id = self.tenant_bar['id']
+
+ orig_project_users = (
+ self.assignment_tenant_controller.get_project_users(_ADMIN_CONTEXT,
+ project_id))
+
+ # Assign a role to a user that doesn't exist to the `bar` project.
+
+ user_id = uuid.uuid4().hex
+ self.assignment_role_controller.add_role_to_user(
+ _ADMIN_CONTEXT, user_id, self.role_other['id'], project_id)
+
+ new_project_users = (
+ self.assignment_tenant_controller.get_project_users(_ADMIN_CONTEXT,
+ project_id))
+
+ # The new user isn't included in the result, so no change.
+ # asserting that the expected values appear in the list,
+ # without asserting the order of the results
+ self.assertEqual(sorted(orig_project_users), sorted(new_project_users))
+
+ def test_list_projects_default_domain(self):
+ """Test that list projects only returns those in the default domain."""
+
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.resource_api.create_project(project1['id'], project1)
+        # Check the real total number of projects; we should have the one
+        # above plus those in the default fixtures.
+ refs = self.resource_api.list_projects()
+ self.assertEqual(len(default_fixtures.TENANTS) + 1, len(refs))
+
+        # Now list all projects using the v2 API - we should only get
+        # back those in the default fixtures, since only those are in the
+        # default domain.
+ refs = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT)
+ self.assertEqual(len(default_fixtures.TENANTS), len(refs['tenants']))
+ for tenant in default_fixtures.TENANTS:
+ tenant_copy = tenant.copy()
+ tenant_copy.pop('domain_id')
+ self.assertIn(tenant_copy, refs['tenants'])
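+
+    # NOTE: conceptually, the v2 filtering exercised above is equivalent
+    # to the following sketch (illustrative only):
+    #
+    #     [t for t in self.resource_api.list_projects()
+    #      if t['domain_id'] == 'default']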
diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
new file mode 100644
index 00000000..7abc5bc4
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
@@ -0,0 +1,1045 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from keystoneclient import exceptions as client_exceptions
+from keystoneclient.v2_0 import client as ks_client
+import mock
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+import webob
+
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import appserver
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class ClientDrivenTestCase(tests.TestCase):
+
+ def setUp(self):
+ super(ClientDrivenTestCase, self).setUp()
+
+ # FIXME(morganfainberg): Since we are running tests through the
+ # controllers and some internal api drivers are SQL-only, the correct
+ # approach is to ensure we have the correct backing store. The
+        # credential api makes some very SQL-specific assumptions that
+        # should be addressed to allow non-SQL based testing to occur.
+ self.useFixture(database.Database())
+ self.load_backends()
+
+ self.load_fixtures(default_fixtures)
+
+        # TODO(termie): add an admin user to the fixtures and use that
+        # user; override the fixtures for now
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ conf = self._paste_config('keystone')
+ fixture = self.useFixture(appserver.AppServer(conf, appserver.MAIN))
+ self.public_server = fixture.server
+ fixture = self.useFixture(appserver.AppServer(conf, appserver.ADMIN))
+ self.admin_server = fixture.server
+
+ self.addCleanup(self.cleanup_instance('public_server', 'admin_server'))
+
+ def _public_url(self):
+ public_port = self.public_server.socket_info['socket'][1]
+ return "http://localhost:%s/v2.0" % public_port
+
+ def _admin_url(self):
+ admin_port = self.admin_server.socket_info['socket'][1]
+ return "http://localhost:%s/v2.0" % admin_port
+
+ def _client(self, admin=False, **kwargs):
+ url = self._admin_url() if admin else self._public_url()
+ kc = ks_client.Client(endpoint=url,
+ auth_url=self._public_url(),
+ **kwargs)
+ kc.authenticate()
+ # have to manually overwrite the management url after authentication
+ kc.management_url = url
+ return kc
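+
+    # NOTE: usage sketch -- extra keyword arguments are passed straight
+    # through to keystoneclient's v2.0 Client constructor, e.g.:
+    #
+    #     client = self._client(username=self.user_foo['name'],
+    #                           password=self.user_foo['password'],
+    #                           tenant_id='bar', admin=True)
+    #     client.tenants.list()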
+
+ def get_client(self, user_ref=None, tenant_ref=None, admin=False):
+ if user_ref is None:
+ user_ref = self.user_foo
+ if tenant_ref is None:
+ for user in default_fixtures.USERS:
+                # The fixture ID is no longer used as the ID in the
+                # database. The fixture ID, however, is still used as part
+                # of the attribute name when storing the created object on
+                # the test case. This means that we need to use the fixture
+                # ID below to find the actual object so that we can get the
+                # ID as stored in the database to compare against.
+ if (getattr(self, 'user_%s' % user['id'])['id'] ==
+ user_ref['id']):
+ tenant_id = user['tenants'][0]
+ else:
+ tenant_id = tenant_ref['id']
+
+ return self._client(username=user_ref['name'],
+ password=user_ref['password'],
+ tenant_id=tenant_id,
+ admin=admin)
+
+ def test_authenticate_tenant_name_and_tenants(self):
+ client = self.get_client()
+ tenants = client.tenants.list()
+ self.assertEqual(self.tenant_bar['id'], tenants[0].id)
+
+ def test_authenticate_tenant_id_and_tenants(self):
+ client = self._client(username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_id='bar')
+ tenants = client.tenants.list()
+ self.assertEqual(self.tenant_bar['id'], tenants[0].id)
+
+ def test_authenticate_invalid_tenant_id(self):
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_id='baz')
+
+ def test_authenticate_token_no_tenant(self):
+ client = self.get_client()
+ token = client.auth_token
+ token_client = self._client(token=token)
+ tenants = token_client.tenants.list()
+ self.assertEqual(self.tenant_bar['id'], tenants[0].id)
+
+ def test_authenticate_token_tenant_id(self):
+ client = self.get_client()
+ token = client.auth_token
+ token_client = self._client(token=token, tenant_id='bar')
+ tenants = token_client.tenants.list()
+ self.assertEqual(self.tenant_bar['id'], tenants[0].id)
+
+ def test_authenticate_token_invalid_tenant_id(self):
+ client = self.get_client()
+ token = client.auth_token
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client, token=token,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_authenticate_token_invalid_tenant_name(self):
+ client = self.get_client()
+ token = client.auth_token
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client, token=token,
+ tenant_name=uuid.uuid4().hex)
+
+ def test_authenticate_token_tenant_name(self):
+ client = self.get_client()
+ token = client.auth_token
+ token_client = self._client(token=token, tenant_name='BAR')
+ tenants = token_client.tenants.list()
+ self.assertEqual(self.tenant_bar['id'], tenants[0].id)
+
+ def test_authenticate_and_delete_token(self):
+ client = self.get_client(admin=True)
+ token = client.auth_token
+ token_client = self._client(token=token)
+ tenants = token_client.tenants.list()
+ self.assertEqual(self.tenant_bar['id'], tenants[0].id)
+
+ client.tokens.delete(token_client.auth_token)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ token_client.tenants.list)
+
+ def test_authenticate_no_password(self):
+ user_ref = self.user_foo.copy()
+ user_ref['password'] = None
+ self.assertRaises(client_exceptions.AuthorizationFailure,
+ self.get_client,
+ user_ref)
+
+ def test_authenticate_no_username(self):
+ user_ref = self.user_foo.copy()
+ user_ref['name'] = None
+ self.assertRaises(client_exceptions.AuthorizationFailure,
+ self.get_client,
+ user_ref)
+
+ def test_authenticate_disabled_tenant(self):
+ admin_client = self.get_client(admin=True)
+
+ tenant = {
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': False,
+ }
+ tenant_ref = admin_client.tenants.create(
+ tenant_name=tenant['name'],
+ description=tenant['description'],
+ enabled=tenant['enabled'])
+ tenant['id'] = tenant_ref.id
+
+ user = {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'email': uuid.uuid4().hex,
+ 'tenant_id': tenant['id'],
+ }
+ user_ref = admin_client.users.create(
+ name=user['name'],
+ password=user['password'],
+ email=user['email'],
+ tenant_id=user['tenant_id'])
+ user['id'] = user_ref.id
+
+ # password authentication
+ self.assertRaises(
+ client_exceptions.Unauthorized,
+ self._client,
+ username=user['name'],
+ password=user['password'],
+ tenant_id=tenant['id'])
+
+ # token authentication
+ client = self._client(
+ username=user['name'],
+ password=user['password'])
+ self.assertRaises(
+ client_exceptions.Unauthorized,
+ self._client,
+ token=client.auth_token,
+ tenant_id=tenant['id'])
+
+    # FIXME(ja): this test should require the "keystone:admin" role
+ # (probably the role set via --keystone_admin_role flag)
+ # FIXME(ja): add a test that admin endpoint is only sent to admin user
+ # FIXME(ja): add a test that admin endpoint returns unauthorized if not
+ # admin
+ def test_tenant_create_update_and_delete(self):
+ tenant_name = 'original_tenant'
+ tenant_description = 'My original tenant!'
+ tenant_enabled = True
+ client = self.get_client(admin=True)
+
+ # create, get, and list a tenant
+ tenant = client.tenants.create(tenant_name=tenant_name,
+ description=tenant_description,
+ enabled=tenant_enabled)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertEqual(tenant_enabled, tenant.enabled)
+
+ tenant = client.tenants.get(tenant_id=tenant.id)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertEqual(tenant_enabled, tenant.enabled)
+
+ tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertEqual(tenant_enabled, tenant.enabled)
+
+ # update, get, and list a tenant
+ tenant_name = 'updated_tenant'
+ tenant_description = 'Updated tenant!'
+ tenant_enabled = False
+ tenant = client.tenants.update(tenant_id=tenant.id,
+ tenant_name=tenant_name,
+ enabled=tenant_enabled,
+ description=tenant_description)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertEqual(tenant_enabled, tenant.enabled)
+
+ tenant = client.tenants.get(tenant_id=tenant.id)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertEqual(tenant_enabled, tenant.enabled)
+
+ tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertEqual(tenant_enabled, tenant.enabled)
+
+ # delete, get, and list a tenant
+ client.tenants.delete(tenant=tenant.id)
+ self.assertRaises(client_exceptions.NotFound, client.tenants.get,
+ tenant.id)
+ self.assertFalse([t for t in client.tenants.list()
+ if t.id == tenant.id])
+
+ def test_tenant_create_update_and_delete_unicode(self):
+ tenant_name = u'original \u540d\u5b57'
+ tenant_description = 'My original tenant!'
+ tenant_enabled = True
+ client = self.get_client(admin=True)
+
+ # create, get, and list a tenant
+ tenant = client.tenants.create(tenant_name,
+ description=tenant_description,
+ enabled=tenant_enabled)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertIs(tenant.enabled, tenant_enabled)
+
+ tenant = client.tenants.get(tenant.id)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertIs(tenant.enabled, tenant_enabled)
+
+ # multiple tenants exist due to fixtures, so find the one we're testing
+ tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertIs(tenant.enabled, tenant_enabled)
+
+ # update, get, and list a tenant
+ tenant_name = u'updated \u540d\u5b57'
+ tenant_description = 'Updated tenant!'
+ tenant_enabled = False
+ tenant = client.tenants.update(tenant.id,
+ tenant_name=tenant_name,
+ enabled=tenant_enabled,
+ description=tenant_description)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertIs(tenant.enabled, tenant_enabled)
+
+ tenant = client.tenants.get(tenant.id)
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertIs(tenant.enabled, tenant_enabled)
+
+ tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
+ self.assertEqual(tenant_name, tenant.name)
+ self.assertEqual(tenant_description, tenant.description)
+ self.assertIs(tenant.enabled, tenant_enabled)
+
+ # delete, get, and list a tenant
+ client.tenants.delete(tenant.id)
+ self.assertRaises(client_exceptions.NotFound, client.tenants.get,
+ tenant.id)
+ self.assertFalse([t for t in client.tenants.list()
+ if t.id == tenant.id])
+
+ def test_tenant_create_no_name(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.create,
+ tenant_name="")
+
+ def test_tenant_delete_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.tenants.delete,
+ tenant=uuid.uuid4().hex)
+
+ def test_tenant_get_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.tenants.get,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_tenant_update_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.tenants.update,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_tenant_list(self):
+ client = self.get_client()
+ tenants = client.tenants.list()
+ self.assertEqual(1, len(tenants))
+
+ # Admin endpoint should return *all* tenants
+ client = self.get_client(admin=True)
+ tenants = client.tenants.list()
+ self.assertEqual(len(default_fixtures.TENANTS), len(tenants))
+
+ def test_invalid_password(self):
+ good_client = self._client(username=self.user_foo['name'],
+ password=self.user_foo['password'])
+ good_client.tenants.list()
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=self.user_foo['name'],
+ password=uuid.uuid4().hex)
+
+ def test_invalid_user_and_password(self):
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=uuid.uuid4().hex,
+ password=uuid.uuid4().hex)
+
+ def test_change_password_invalidates_token(self):
+ admin_client = self.get_client(admin=True)
+
+ username = uuid.uuid4().hex
+ password = uuid.uuid4().hex
+ user = admin_client.users.create(name=username, password=password,
+ email=uuid.uuid4().hex)
+
+ # auth as user should work before a password change
+ client = self._client(username=username, password=password)
+
+ # auth as user with a token should work before a password change
+ self._client(token=client.auth_token)
+
+ # administrative password reset
+ admin_client.users.update_password(
+ user=user.id,
+ password=uuid.uuid4().hex)
+
+ # auth as user with original password should not work after change
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=username,
+ password=password)
+
+        # authenticating with an old token should not work after the change
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ token=client.auth_token)
+
+ def test_user_change_own_password_invalidates_token(self):
+ # bootstrap a user as admin
+ client = self.get_client(admin=True)
+ username = uuid.uuid4().hex
+ password = uuid.uuid4().hex
+ client.users.create(name=username, password=password,
+ email=uuid.uuid4().hex)
+
+ # auth as user should work before a password change
+ client = self._client(username=username, password=password)
+
+ # auth as user with a token should work before a password change
+ self._client(token=client.auth_token)
+
+ # change the user's own password
+ # TODO(dolphm): This should NOT raise an HTTPError at all, but rather
+ # this should succeed with a 2xx. This 500 does not prevent the test
+ # from demonstrating the desired consequences below, though.
+ self.assertRaises(client_exceptions.HTTPError,
+ client.users.update_own_password,
+ password, uuid.uuid4().hex)
+
+ # auth as user with original password should not work after change
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=username,
+ password=password)
+
+ # auth as user with an old token should not work after change
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ token=client.auth_token)
+
+ def test_disable_tenant_invalidates_token(self):
+ admin_client = self.get_client(admin=True)
+ foo_client = self.get_client(self.user_foo)
+ tenant_bar = admin_client.tenants.get(self.tenant_bar['id'])
+
+ # Disable the tenant.
+ tenant_bar.update(enabled=False)
+
+ # Test that the token has been removed.
+ self.assertRaises(client_exceptions.Unauthorized,
+ foo_client.tokens.authenticate,
+ token=foo_client.auth_token)
+
+ # Test that the user access has been disabled.
+ self.assertRaises(client_exceptions.Unauthorized,
+ self.get_client,
+ self.user_foo)
+
+ def test_delete_tenant_invalidates_token(self):
+ admin_client = self.get_client(admin=True)
+ foo_client = self.get_client(self.user_foo)
+ tenant_bar = admin_client.tenants.get(self.tenant_bar['id'])
+
+ # Delete the tenant.
+ tenant_bar.delete()
+
+ # Test that the token has been removed.
+ self.assertRaises(client_exceptions.Unauthorized,
+ foo_client.tokens.authenticate,
+ token=foo_client.auth_token)
+
+ # Test that the user access has been disabled.
+ self.assertRaises(client_exceptions.Unauthorized,
+ self.get_client,
+ self.user_foo)
+
+ def test_disable_user_invalidates_token(self):
+ admin_client = self.get_client(admin=True)
+ foo_client = self.get_client(self.user_foo)
+
+ admin_client.users.update_enabled(user=self.user_foo['id'],
+ enabled=False)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ foo_client.tokens.authenticate,
+ token=foo_client.auth_token)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self.get_client,
+ self.user_foo)
+
+ def test_delete_user_invalidates_token(self):
+ admin_client = self.get_client(admin=True)
+ client = self.get_client(admin=False)
+
+ username = uuid.uuid4().hex
+ password = uuid.uuid4().hex
+ user_id = admin_client.users.create(
+ name=username, password=password, email=uuid.uuid4().hex).id
+
+ token_id = client.tokens.authenticate(
+ username=username, password=password).id
+
+ # token should be usable before the user is deleted
+ client.tokens.authenticate(token=token_id)
+
+ admin_client.users.delete(user=user_id)
+
+        # authenticating with a token should not work after the user is
+        # deleted
+ self.assertRaises(client_exceptions.Unauthorized,
+ client.tokens.authenticate,
+ token=token_id)
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_token_expiry_maintained(self, mock_utcnow):
+ now = datetime.datetime.utcnow()
+ mock_utcnow.return_value = now
+ foo_client = self.get_client(self.user_foo)
+
+ orig_token = foo_client.service_catalog.catalog['token']
+ mock_utcnow.return_value = now + datetime.timedelta(seconds=1)
+ reauthenticated_token = foo_client.tokens.authenticate(
+ token=foo_client.auth_token)
+
+ self.assertCloseEnoughForGovernmentWork(
+ timeutils.parse_isotime(orig_token['expires']),
+ timeutils.parse_isotime(reauthenticated_token.expires))
+
+ def test_user_create_update_delete(self):
+ test_username = 'new_user'
+ client = self.get_client(admin=True)
+ user = client.users.create(name=test_username,
+ password='password',
+ email='user1@test.com')
+ self.assertEqual(test_username, user.name)
+
+ user = client.users.get(user=user.id)
+ self.assertEqual(test_username, user.name)
+
+ user = client.users.update(user=user,
+ name=test_username,
+ email='user2@test.com')
+ self.assertEqual('user2@test.com', user.email)
+
+ # NOTE(termie): update_enabled doesn't return anything, probably a bug
+ client.users.update_enabled(user=user, enabled=False)
+ user = client.users.get(user.id)
+ self.assertFalse(user.enabled)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=test_username,
+ password='password')
+ client.users.update_enabled(user, True)
+
+ user = client.users.update_password(user=user, password='password2')
+
+ self._client(username=test_username,
+ password='password2')
+
+ user = client.users.update_tenant(user=user, tenant='bar')
+        # TODO(ja): once keystonelight supports a default tenant,
+        #           logging in without specifying a tenant should yield
+        #           a token scoped to tenant 'bar'
+
+ client.users.delete(user.id)
+ self.assertRaises(client_exceptions.NotFound, client.users.get,
+ user.id)
+
+ # Test creating a user with a tenant (auto-add to tenant)
+ user2 = client.users.create(name=test_username,
+ password='password',
+ email='user1@test.com',
+ tenant_id='bar')
+ self.assertEqual(test_username, user2.name)
+
+ def test_update_default_tenant_to_existing_value(self):
+ client = self.get_client(admin=True)
+
+ user = client.users.create(
+ name=uuid.uuid4().hex,
+ password=uuid.uuid4().hex,
+ email=uuid.uuid4().hex,
+ tenant_id=self.tenant_bar['id'])
+
+ # attempting to update the tenant with the existing value should work
+ user = client.users.update_tenant(
+ user=user, tenant=self.tenant_bar['id'])
+
+ def test_user_create_no_string_password(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.users.create,
+ name='test_user',
+ password=12345,
+ email=uuid.uuid4().hex)
+
+ def test_user_create_no_name(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.users.create,
+ name="",
+ password=uuid.uuid4().hex,
+ email=uuid.uuid4().hex)
+
+ def test_user_create_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.create,
+ name=uuid.uuid4().hex,
+ password=uuid.uuid4().hex,
+ email=uuid.uuid4().hex,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_user_get_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.get,
+ user=uuid.uuid4().hex)
+
+ def test_user_list_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.list,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_user_update_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.update,
+ user=uuid.uuid4().hex)
+
+ def test_user_update_tenant(self):
+ client = self.get_client(admin=True)
+ tenant_id = uuid.uuid4().hex
+ user = client.users.update(user=self.user_foo['id'],
+ tenant_id=tenant_id)
+ self.assertEqual(tenant_id, user.tenant_id)
+
+ def test_user_update_password_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.update_password,
+ user=uuid.uuid4().hex,
+ password=uuid.uuid4().hex)
+
+ def test_user_delete_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.delete,
+ user=uuid.uuid4().hex)
+
+ def test_user_list(self):
+ client = self.get_client(admin=True)
+ users = client.users.list()
+ self.assertTrue(len(users) > 0)
+ user = users[0]
+ self.assertRaises(AttributeError, lambda: user.password)
+
+ def test_user_get(self):
+ client = self.get_client(admin=True)
+ user = client.users.get(user=self.user_foo['id'])
+ self.assertRaises(AttributeError, lambda: user.password)
+
+ def test_role_get(self):
+ client = self.get_client(admin=True)
+ role = client.roles.get(role=self.role_admin['id'])
+ self.assertEqual(self.role_admin['id'], role.id)
+
+ def test_role_crud(self):
+ test_role = 'new_role'
+ client = self.get_client(admin=True)
+ role = client.roles.create(name=test_role)
+ self.assertEqual(test_role, role.name)
+
+ role = client.roles.get(role=role.id)
+ self.assertEqual(test_role, role.name)
+
+ client.roles.delete(role=role.id)
+
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.delete,
+ role=role.id)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.get,
+ role=role.id)
+
+ def test_role_create_no_name(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.roles.create,
+ name="")
+
+ def test_role_get_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.get,
+ role=uuid.uuid4().hex)
+
+ def test_role_delete_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.delete,
+ role=uuid.uuid4().hex)
+
+ def test_role_list_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.roles_for_user,
+ user=uuid.uuid4().hex,
+ tenant=uuid.uuid4().hex)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.roles_for_user,
+ user=self.user_foo['id'],
+ tenant=uuid.uuid4().hex)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.roles_for_user,
+ user=uuid.uuid4().hex,
+ tenant=self.tenant_bar['id'])
+
+ def test_role_list(self):
+ client = self.get_client(admin=True)
+ roles = client.roles.list()
+ # TODO(devcamcar): This assert should be more specific.
+ self.assertTrue(len(roles) > 0)
+
+ def test_service_crud(self):
+ client = self.get_client(admin=True)
+
+ service_name = uuid.uuid4().hex
+ service_type = uuid.uuid4().hex
+ service_desc = uuid.uuid4().hex
+
+ # create & read
+ service = client.services.create(name=service_name,
+ service_type=service_type,
+ description=service_desc)
+ self.assertEqual(service_name, service.name)
+ self.assertEqual(service_type, service.type)
+ self.assertEqual(service_desc, service.description)
+
+ service = client.services.get(id=service.id)
+ self.assertEqual(service_name, service.name)
+ self.assertEqual(service_type, service.type)
+ self.assertEqual(service_desc, service.description)
+
+ service = [x for x in client.services.list() if x.id == service.id][0]
+ self.assertEqual(service_name, service.name)
+ self.assertEqual(service_type, service.type)
+ self.assertEqual(service_desc, service.description)
+
+ # update is not supported in API v2...
+
+ # delete & read
+ client.services.delete(id=service.id)
+ self.assertRaises(client_exceptions.NotFound,
+ client.services.get,
+ id=service.id)
+ services = [x for x in client.services.list() if x.id == service.id]
+ self.assertEqual(0, len(services))
+
+ def test_service_delete_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.services.delete,
+ id=uuid.uuid4().hex)
+
+ def test_service_get_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.services.get,
+ id=uuid.uuid4().hex)
+
+ def test_endpoint_delete_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.endpoints.delete,
+ id=uuid.uuid4().hex)
+
+ def test_admin_requires_adminness(self):
+ # FIXME(ja): this should be Unauthorized
+ exception = client_exceptions.ClientException
+
+ two = self.get_client(self.user_two, admin=True) # non-admin user
+
+ # USER CRUD
+ self.assertRaises(exception,
+ two.users.list)
+ self.assertRaises(exception,
+ two.users.get,
+ user=self.user_two['id'])
+ self.assertRaises(exception,
+ two.users.create,
+ name='oops',
+ password='password',
+ email='oops@test.com')
+ self.assertRaises(exception,
+ two.users.delete,
+ user=self.user_foo['id'])
+
+ # TENANT CRUD
+ self.assertRaises(exception,
+ two.tenants.list)
+ self.assertRaises(exception,
+ two.tenants.get,
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(exception,
+ two.tenants.create,
+ tenant_name='oops',
+ description="shouldn't work!",
+ enabled=True)
+ self.assertRaises(exception,
+ two.tenants.delete,
+ tenant=self.tenant_baz['id'])
+
+ # ROLE CRUD
+ self.assertRaises(exception,
+ two.roles.get,
+ role=self.role_admin['id'])
+ self.assertRaises(exception,
+ two.roles.list)
+ self.assertRaises(exception,
+ two.roles.create,
+ name='oops')
+ self.assertRaises(exception,
+ two.roles.delete,
+ role=self.role_admin['id'])
+
+ # TODO(ja): MEMBERSHIP CRUD
+        # TODO(ja): determine what else to do
+
+ def test_tenant_add_and_remove_user(self):
+ client = self.get_client(admin=True)
+ client.roles.add_user_role(tenant=self.tenant_bar['id'],
+ user=self.user_two['id'],
+ role=self.role_other['id'])
+ user_refs = client.tenants.list_users(tenant=self.tenant_bar['id'])
+ self.assertIn(self.user_two['id'], [x.id for x in user_refs])
+ client.roles.remove_user_role(tenant=self.tenant_bar['id'],
+ user=self.user_two['id'],
+ role=self.role_other['id'])
+ roles = client.roles.roles_for_user(user=self.user_foo['id'],
+ tenant=self.tenant_bar['id'])
+ self.assertNotIn(self.role_other['id'], roles)
+ user_refs = client.tenants.list_users(tenant=self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], [x.id for x in user_refs])
+
+ def test_user_role_add_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.add_user_role,
+ tenant=uuid.uuid4().hex,
+ user=self.user_foo['id'],
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.add_user_role,
+ tenant=self.tenant_baz['id'],
+ user=self.user_foo['id'],
+ role=uuid.uuid4().hex)
+
+ def test_user_role_add_no_user(self):
+        # add_user_role does not fail when the user does not exist.
+ client = self.get_client(admin=True)
+ client.roles.add_user_role(tenant=self.tenant_baz['id'],
+ user=uuid.uuid4().hex,
+ role=self.role_member['id'])
+
+ def test_user_role_remove_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=uuid.uuid4().hex,
+ user=self.user_foo['id'],
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=self.tenant_baz['id'],
+ user=uuid.uuid4().hex,
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=self.tenant_baz['id'],
+ user=self.user_foo['id'],
+ role=uuid.uuid4().hex)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=self.tenant_baz['id'],
+ user=self.user_foo['id'],
+ role=self.role_member['id'])
+
+ def test_tenant_list_marker(self):
+ client = self.get_client()
+
+ # Add two arbitrary tenants to user for testing purposes
+ for i in range(2):
+ tenant_id = uuid.uuid4().hex
+ tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(tenant_id, tenant)
+ self.assignment_api.add_user_to_project(tenant_id,
+ self.user_foo['id'])
+
+ tenants = client.tenants.list()
+ self.assertEqual(3, len(tenants))
+
+ tenants_marker = client.tenants.list(marker=tenants[0].id)
+ self.assertEqual(2, len(tenants_marker))
+ self.assertEqual(tenants_marker[0].name, tenants[1].name)
+ self.assertEqual(tenants_marker[1].name, tenants[2].name)
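+        # NOTE: marker pagination returns the page *after* the given
+        # tenant, so listing with marker=tenants[0].id yields the
+        # remaining two tenants in their original order.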
+
+ def test_tenant_list_marker_not_found(self):
+ client = self.get_client()
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.list, marker=uuid.uuid4().hex)
+
+ def test_tenant_list_limit(self):
+ client = self.get_client()
+
+ # Add two arbitrary tenants to user for testing purposes
+ for i in range(2):
+ tenant_id = uuid.uuid4().hex
+ tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(tenant_id, tenant)
+ self.assignment_api.add_user_to_project(tenant_id,
+ self.user_foo['id'])
+
+ tenants = client.tenants.list()
+ self.assertEqual(3, len(tenants))
+
+ tenants_limited = client.tenants.list(limit=2)
+ self.assertEqual(2, len(tenants_limited))
+ self.assertEqual(tenants[0].name, tenants_limited[0].name)
+ self.assertEqual(tenants[1].name, tenants_limited[1].name)
+
+ def test_tenant_list_limit_bad_value(self):
+ client = self.get_client()
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.list, limit='a')
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.list, limit=-1)
+
+ def test_roles_get_by_user(self):
+ client = self.get_client(admin=True)
+ roles = client.roles.roles_for_user(user=self.user_foo['id'],
+ tenant=self.tenant_bar['id'])
+ self.assertTrue(len(roles) > 0)
+
+ def test_user_can_update_passwd(self):
+ client = self.get_client(self.user_two)
+
+ token_id = client.auth_token
+ new_password = uuid.uuid4().hex
+
+ # TODO(derekh): Update to use keystoneclient when available
+ class FakeResponse(object):
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+ responseobject = FakeResponse()
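+        # NOTE: FakeResponse stands in for a WSGI start_response
+        # callable: the application invokes it with the status string
+        # and header list, which we capture here for later assertions.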
+
+ req = webob.Request.blank(
+ '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'],
+ headers={'X-Auth-Token': token_id})
+ req.method = 'PATCH'
+ req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
+ (new_password, self.user_two['password']))
+ self.public_server.application(req.environ,
+ responseobject.start_fake_response)
+
+ self.user_two['password'] = new_password
+ self.get_client(self.user_two)
+
+ def test_user_cannot_update_other_users_passwd(self):
+ client = self.get_client(self.user_two)
+
+ token_id = client.auth_token
+ new_password = uuid.uuid4().hex
+
+ # TODO(derekh): Update to use keystoneclient when available
+ class FakeResponse(object):
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+ responseobject = FakeResponse()
+
+ req = webob.Request.blank(
+ '/v2.0/OS-KSCRUD/users/%s' % self.user_foo['id'],
+ headers={'X-Auth-Token': token_id})
+ req.method = 'PATCH'
+ req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
+ (new_password, self.user_two['password']))
+ self.public_server.application(req.environ,
+ responseobject.start_fake_response)
+ self.assertEqual(403, responseobject.response_status)
+
+ self.user_two['password'] = new_password
+ self.assertRaises(client_exceptions.Unauthorized,
+ self.get_client, self.user_two)
+
+ def test_tokens_after_user_update_passwd(self):
+ client = self.get_client(self.user_two)
+
+ token_id = client.auth_token
+ new_password = uuid.uuid4().hex
+
+ # TODO(derekh): Update to use keystoneclient when available
+ class FakeResponse(object):
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+ responseobject = FakeResponse()
+
+ req = webob.Request.blank(
+ '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'],
+ headers={'X-Auth-Token': token_id})
+ req.method = 'PATCH'
+ req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
+ (new_password, self.user_two['password']))
+
+ rv = self.public_server.application(
+ req.environ,
+ responseobject.start_fake_response)
+ response_json = jsonutils.loads(rv.pop())
+ new_token_id = response_json['access']['token']['id']
+
+ self.assertRaises(client_exceptions.Unauthorized, client.tenants.list)
+ client.auth_token = new_token_id
+ client.tenants.list()
diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py
new file mode 100644
index 00000000..0fb60fd9
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py
@@ -0,0 +1,344 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystoneclient.contrib.ec2 import utils as ec2_utils
+from keystoneclient import exceptions as client_exceptions
+
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_v2_keystoneclient
+
+
+class ClientDrivenSqlTestCase(test_v2_keystoneclient.ClientDrivenTestCase):
+ def config_files(self):
+ config_files = super(ClientDrivenSqlTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def setUp(self):
+ super(ClientDrivenSqlTestCase, self).setUp()
+ self.default_client = self.get_client()
+ self.addCleanup(self.cleanup_instance('default_client'))
+
+ def test_endpoint_crud(self):
+ client = self.get_client(admin=True)
+
+ service = client.services.create(name=uuid.uuid4().hex,
+ service_type=uuid.uuid4().hex,
+ description=uuid.uuid4().hex)
+
+ endpoint_region = uuid.uuid4().hex
+ invalid_service_id = uuid.uuid4().hex
+ endpoint_publicurl = uuid.uuid4().hex
+ endpoint_internalurl = uuid.uuid4().hex
+ endpoint_adminurl = uuid.uuid4().hex
+
+ # a non-existent service ID should trigger a 400
+ self.assertRaises(client_exceptions.BadRequest,
+ client.endpoints.create,
+ region=endpoint_region,
+ service_id=invalid_service_id,
+ publicurl=endpoint_publicurl,
+ adminurl=endpoint_adminurl,
+ internalurl=endpoint_internalurl)
+
+ endpoint = client.endpoints.create(region=endpoint_region,
+ service_id=service.id,
+ publicurl=endpoint_publicurl,
+ adminurl=endpoint_adminurl,
+ internalurl=endpoint_internalurl)
+
+ self.assertEqual(endpoint_region, endpoint.region)
+ self.assertEqual(service.id, endpoint.service_id)
+ self.assertEqual(endpoint_publicurl, endpoint.publicurl)
+ self.assertEqual(endpoint_internalurl, endpoint.internalurl)
+ self.assertEqual(endpoint_adminurl, endpoint.adminurl)
+
+ client.endpoints.delete(id=endpoint.id)
+ self.assertRaises(client_exceptions.NotFound, client.endpoints.delete,
+ id=endpoint.id)
+
+ def _send_ec2_auth_request(self, credentials, client=None):
+ if not client:
+ client = self.default_client
+ url = '%s/ec2tokens' % self.default_client.auth_url
+ (resp, token) = client.request(
+ url=url, method='POST',
+ body={'credentials': credentials})
+ return resp, token
+
+ def _generate_default_user_ec2_credentials(self):
+        cred = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ return self._generate_user_ec2_credentials(cred.access, cred.secret)
+
+ def _generate_user_ec2_credentials(self, access, secret):
+ signer = ec2_utils.Ec2Signer(secret)
+ credentials = {'params': {'SignatureVersion': '2'},
+ 'access': access,
+ 'verb': 'GET',
+ 'host': 'localhost',
+ 'path': '/service/cloud'}
+ signature = signer.generate(credentials)
+ return credentials, signature
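+
+    # NOTE: usage sketch -- the helpers above combine as follows:
+    #
+    #     credentials, signature = (
+    #         self._generate_default_user_ec2_credentials())
+    #     credentials['signature'] = signature
+    #     resp, token = self._send_ec2_auth_request(credentials)
+    #
+    # Ec2Signer computes an AWS-style HMAC signature over the verb,
+    # host, path and params; the server recomputes it from the stored
+    # secret and compares the two.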
+
+ def test_ec2_auth_success(self):
+ credentials, signature = self._generate_default_user_ec2_credentials()
+ credentials['signature'] = signature
+ resp, token = self._send_ec2_auth_request(credentials)
+ self.assertEqual(200, resp.status_code)
+ self.assertIn('access', token)
+
+ def test_ec2_auth_success_trust(self):
+ # Add "other" role user_foo and create trust delegating it to user_two
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_other['id'])
+ trust_id = 'atrust123'
+ trust = {'trustor_user_id': self.user_foo['id'],
+ 'trustee_user_id': self.user_two['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'impersonation': True}
+ roles = [self.role_other]
+ self.trust_api.create_trust(trust_id, trust, roles)
+
+ # Create a client for user_two, scoped to the trust
+ client = self.get_client(self.user_two)
+ ret = client.authenticate(trust_id=trust_id,
+ tenant_id=self.tenant_bar['id'])
+ self.assertTrue(ret)
+ self.assertTrue(client.auth_ref.trust_scoped)
+ self.assertEqual(trust_id, client.auth_ref.trust_id)
+
+ # Create an ec2 keypair using the trust client impersonating user_foo
+ cred = client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ credentials, signature = self._generate_user_ec2_credentials(
+ cred.access, cred.secret)
+ credentials['signature'] = signature
+ resp, token = self._send_ec2_auth_request(credentials)
+ self.assertEqual(200, resp.status_code)
+ self.assertEqual(trust_id, token['access']['trust']['id'])
+ # TODO(shardy) we really want to check the roles and trustee
+ # but because of where the stubbing happens we don't seem to
+ # hit the necessary code in controllers.py _authenticate_token
+        # so although all is OK via a real request, it is incorrect in
+        # this test.
+
+ def test_ec2_auth_failure(self):
+ credentials, signature = self._generate_default_user_ec2_credentials()
+ credentials['signature'] = uuid.uuid4().hex
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._send_ec2_auth_request,
+ credentials)
+
+ def test_ec2_credential_crud(self):
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual([], creds)
+
+ cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual(creds, [cred])
+ got = self.default_client.ec2.get(user_id=self.user_foo['id'],
+ access=cred.access)
+ self.assertEqual(cred, got)
+
+ self.default_client.ec2.delete(user_id=self.user_foo['id'],
+ access=cred.access)
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual([], creds)
+
+ def test_ec2_credential_crud_non_admin(self):
+ na_client = self.get_client(self.user_two)
+ creds = na_client.ec2.list(user_id=self.user_two['id'])
+ self.assertEqual([], creds)
+
+ cred = na_client.ec2.create(user_id=self.user_two['id'],
+ tenant_id=self.tenant_baz['id'])
+ creds = na_client.ec2.list(user_id=self.user_two['id'])
+ self.assertEqual(creds, [cred])
+ got = na_client.ec2.get(user_id=self.user_two['id'],
+ access=cred.access)
+ self.assertEqual(cred, got)
+
+ na_client.ec2.delete(user_id=self.user_two['id'],
+ access=cred.access)
+ creds = na_client.ec2.list(user_id=self.user_two['id'])
+ self.assertEqual([], creds)
+
+ def test_ec2_list_credentials(self):
+ cred_1 = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ cred_2 = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_service['id'])
+ cred_3 = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_mtu['id'])
+ two = self.get_client(self.user_two)
+ cred_4 = two.ec2.create(user_id=self.user_two['id'],
+ tenant_id=self.tenant_bar['id'])
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual(3, len(creds))
+ self.assertEqual(sorted([cred_1, cred_2, cred_3],
+ key=lambda x: x.access),
+ sorted(creds, key=lambda x: x.access))
+ self.assertNotIn(cred_4, creds)
+
+ def test_ec2_credentials_create_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.create,
+ user_id=uuid.uuid4().hex,
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.create,
+ user_id=self.user_foo['id'],
+ tenant_id=uuid.uuid4().hex)
+
+ def test_ec2_credentials_delete_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.delete,
+ user_id=uuid.uuid4().hex,
+ access=uuid.uuid4().hex)
+
+ def test_ec2_credentials_get_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.get,
+ user_id=uuid.uuid4().hex,
+ access=uuid.uuid4().hex)
+
+ def test_ec2_credentials_list_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.list,
+ user_id=uuid.uuid4().hex)
+
+ def test_ec2_credentials_list_user_forbidden(self):
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.list,
+ user_id=self.user_foo['id'])
+
+ def test_ec2_credentials_get_user_forbidden(self):
+ cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.get,
+ user_id=self.user_foo['id'], access=cred.access)
+
+ self.default_client.ec2.delete(user_id=self.user_foo['id'],
+ access=cred.access)
+
+ def test_ec2_credentials_delete_user_forbidden(self):
+ cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.delete,
+ user_id=self.user_foo['id'], access=cred.access)
+
+ self.default_client.ec2.delete(user_id=self.user_foo['id'],
+ access=cred.access)
+
+ def test_endpoint_create_nonexistent_service(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.endpoints.create,
+ region=uuid.uuid4().hex,
+ service_id=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex)
+
+ def test_endpoint_delete_404(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.endpoints.delete,
+ id=uuid.uuid4().hex)
+
+ def test_policy_crud(self):
+ # FIXME(dolph): this test was written prior to the v3 implementation of
+ # the client and essentially refers to a non-existent
+ # policy manager in the v2 client. this test needs to be
+ # moved to a test suite running against the v3 api
+ self.skipTest('Written prior to v3 client; needs refactor')
+
+ client = self.get_client(admin=True)
+
+ policy_blob = uuid.uuid4().hex
+ policy_type = uuid.uuid4().hex
+ service = client.services.create(
+ name=uuid.uuid4().hex,
+ service_type=uuid.uuid4().hex,
+ description=uuid.uuid4().hex)
+ endpoint = client.endpoints.create(
+ service_id=service.id,
+ region=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex)
+
+ # create
+ policy = client.policies.create(
+ blob=policy_blob,
+ type=policy_type,
+ endpoint=endpoint.id)
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ policy = client.policies.get(policy=policy.id)
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id]
+ endpoint = endpoints[0]
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ # update
+ policy_blob = uuid.uuid4().hex
+ policy_type = uuid.uuid4().hex
+ endpoint = client.endpoints.create(
+ service_id=service.id,
+ region=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex)
+
+ policy = client.policies.update(
+ policy=policy.id,
+ blob=policy_blob,
+ type=policy_type,
+ endpoint=endpoint.id)
+
+ policy = client.policies.get(policy=policy.id)
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ # delete
+ client.policies.delete(policy=policy.id)
+ self.assertRaises(
+ client_exceptions.NotFound,
+ client.policies.get,
+ policy=policy.id)
+ policies = [x for x in client.policies.list() if x.id == policy.id]
+ self.assertEqual(0, len(policies))
diff --git a/keystone-moon/keystone/tests/unit/test_v3.py b/keystone-moon/keystone/tests/unit/test_v3.py
new file mode 100644
index 00000000..f6d6ed93
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3.py
@@ -0,0 +1,1283 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+import six
+from testtools import matchers
+
+from keystone import auth
+from keystone.common import authorization
+from keystone.common import cache
+from keystone import exception
+from keystone import middleware
+from keystone.policy.backends import rules
+from keystone.tests import unit as tests
+from keystone.tests.unit import rest
+
+
+CONF = cfg.CONF
+DEFAULT_DOMAIN_ID = 'default'
+
+TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+
+class AuthTestMixin(object):
+ """To hold auth building helper functions."""
+ def build_auth_scope(self, project_id=None, project_name=None,
+ project_domain_id=None, project_domain_name=None,
+ domain_id=None, domain_name=None, trust_id=None,
+ unscoped=None):
+ scope_data = {}
+ if unscoped:
+ scope_data['unscoped'] = {}
+ if project_id or project_name:
+ scope_data['project'] = {}
+ if project_id:
+ scope_data['project']['id'] = project_id
+ else:
+ scope_data['project']['name'] = project_name
+ if project_domain_id or project_domain_name:
+ project_domain_json = {}
+ if project_domain_id:
+ project_domain_json['id'] = project_domain_id
+ else:
+ project_domain_json['name'] = project_domain_name
+ scope_data['project']['domain'] = project_domain_json
+ if domain_id or domain_name:
+ scope_data['domain'] = {}
+ if domain_id:
+ scope_data['domain']['id'] = domain_id
+ else:
+ scope_data['domain']['name'] = domain_name
+ if trust_id:
+ scope_data['OS-TRUST:trust'] = {}
+ scope_data['OS-TRUST:trust']['id'] = trust_id
+ return scope_data
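+
+    # A minimal sketch of the scope data built for a project scope with a
+    # project domain (placeholder values, not real ids):
+    #
+    #     {'project': {'id': '<project_id>',
+    #                  'domain': {'id': '<project_domain_id>'}}}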
+
+ def build_password_auth(self, user_id=None, username=None,
+ user_domain_id=None, user_domain_name=None,
+ password=None):
+ password_data = {'user': {}}
+ if user_id:
+ password_data['user']['id'] = user_id
+ else:
+ password_data['user']['name'] = username
+ if user_domain_id or user_domain_name:
+ password_data['user']['domain'] = {}
+ if user_domain_id:
+ password_data['user']['domain']['id'] = user_domain_id
+ else:
+ password_data['user']['domain']['name'] = user_domain_name
+ password_data['user']['password'] = password
+ return password_data
+
+ def build_token_auth(self, token):
+ return {'id': token}
+
+ def build_authentication_request(self, token=None, user_id=None,
+ username=None, user_domain_id=None,
+ user_domain_name=None, password=None,
+ kerberos=False, **kwargs):
+ """Build auth dictionary.
+
+ It will create an auth dictionary based on all the arguments
+ that it receives.
+ """
+ auth_data = {}
+ auth_data['identity'] = {'methods': []}
+ if kerberos:
+ auth_data['identity']['methods'].append('kerberos')
+ auth_data['identity']['kerberos'] = {}
+ if token:
+ auth_data['identity']['methods'].append('token')
+ auth_data['identity']['token'] = self.build_token_auth(token)
+ if user_id or username:
+ auth_data['identity']['methods'].append('password')
+ auth_data['identity']['password'] = self.build_password_auth(
+ user_id, username, user_domain_id, user_domain_name, password)
+ if kwargs:
+ auth_data['scope'] = self.build_auth_scope(**kwargs)
+ return {'auth': auth_data}
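+
+    # A minimal sketch of the payload built for a password authentication
+    # scoped to a project (placeholder values, not real ids):
+    #
+    #     {'auth': {'identity': {'methods': ['password'],
+    #                            'password': {'user': {
+    #                                'id': '<user_id>',
+    #                                'password': '<password>'}}},
+    #               'scope': {'project': {'id': '<project_id>'}}}}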
+
+
+class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
+ AuthTestMixin):
+ def config_files(self):
+ config_files = super(RestfulTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def get_extensions(self):
+ extensions = set(['revoke'])
+ if hasattr(self, 'EXTENSION_NAME'):
+ extensions.add(self.EXTENSION_NAME)
+ return extensions
+
+ def generate_paste_config(self):
+ new_paste_file = None
+ try:
+ new_paste_file = tests.generate_paste_config(self.EXTENSION_TO_ADD)
+ except AttributeError:
+ # no need to report this error here, as most tests will not have
+ # EXTENSION_TO_ADD defined.
+ pass
+        return new_paste_file
+
+ def remove_generated_paste_config(self):
+ try:
+ tests.remove_generated_paste_config(self.EXTENSION_TO_ADD)
+ except AttributeError:
+ pass
+
+ def setUp(self, app_conf='keystone'):
+ """Setup for v3 Restful Test Cases.
+
+ """
+ new_paste_file = self.generate_paste_config()
+ self.addCleanup(self.remove_generated_paste_config)
+ if new_paste_file:
+ app_conf = 'config:%s' % (new_paste_file)
+
+ super(RestfulTestCase, self).setUp(app_conf=app_conf)
+
+ self.empty_context = {'environment': {}}
+
+ # Initialize the policy engine and allow us to write to a temp
+ # file in each test to create the policies
+ rules.reset()
+
+ # drop the policy rules
+ self.addCleanup(rules.reset)
+
+ def load_backends(self):
+ # ensure the cache region instance is setup
+ cache.configure_cache_region(cache.REGION)
+
+ super(RestfulTestCase, self).load_backends()
+
+ def load_fixtures(self, fixtures):
+ self.load_sample_data()
+
+ def _populate_default_domain(self):
+ if CONF.database.connection == tests.IN_MEM_DB_CONN_STRING:
+ # NOTE(morganfainberg): If an in-memory db is being used, be sure
+ # to populate the default domain, this is typically done by
+ # a migration, but the in-mem db uses model definitions to create
+ # the schema (no migrations are run).
+ try:
+ self.resource_api.get_domain(DEFAULT_DOMAIN_ID)
+ except exception.DomainNotFound:
+ domain = {'description': (u'Owns users and tenants (i.e. '
+ u'projects) available on Identity '
+ u'API v2.'),
+ 'enabled': True,
+ 'id': DEFAULT_DOMAIN_ID,
+ 'name': u'Default'}
+ self.resource_api.create_domain(DEFAULT_DOMAIN_ID, domain)
+
+ def load_sample_data(self):
+ self._populate_default_domain()
+ self.domain_id = uuid.uuid4().hex
+ self.domain = self.new_domain_ref()
+ self.domain['id'] = self.domain_id
+ self.resource_api.create_domain(self.domain_id, self.domain)
+
+ self.project_id = uuid.uuid4().hex
+ self.project = self.new_project_ref(
+ domain_id=self.domain_id)
+ self.project['id'] = self.project_id
+ self.resource_api.create_project(self.project_id, self.project)
+
+ self.user = self.new_user_ref(domain_id=self.domain_id)
+ password = self.user['password']
+ self.user = self.identity_api.create_user(self.user)
+ self.user['password'] = password
+ self.user_id = self.user['id']
+
+ self.default_domain_project_id = uuid.uuid4().hex
+ self.default_domain_project = self.new_project_ref(
+ domain_id=DEFAULT_DOMAIN_ID)
+ self.default_domain_project['id'] = self.default_domain_project_id
+ self.resource_api.create_project(self.default_domain_project_id,
+ self.default_domain_project)
+
+ self.default_domain_user = self.new_user_ref(
+ domain_id=DEFAULT_DOMAIN_ID)
+ password = self.default_domain_user['password']
+ self.default_domain_user = (
+ self.identity_api.create_user(self.default_domain_user))
+ self.default_domain_user['password'] = password
+ self.default_domain_user_id = self.default_domain_user['id']
+
+ # create & grant policy.json's default role for admin_required
+ self.role_id = uuid.uuid4().hex
+ self.role = self.new_role_ref()
+ self.role['id'] = self.role_id
+ self.role['name'] = 'admin'
+ self.role_api.create_role(self.role_id, self.role)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_id, self.project_id, self.role_id)
+ self.assignment_api.add_role_to_user_and_project(
+ self.default_domain_user_id, self.default_domain_project_id,
+ self.role_id)
+ self.assignment_api.add_role_to_user_and_project(
+ self.default_domain_user_id, self.project_id,
+ self.role_id)
+
+ self.region_id = uuid.uuid4().hex
+ self.region = self.new_region_ref()
+ self.region['id'] = self.region_id
+ self.catalog_api.create_region(
+ self.region.copy())
+
+ self.service_id = uuid.uuid4().hex
+ self.service = self.new_service_ref()
+ self.service['id'] = self.service_id
+ self.catalog_api.create_service(
+ self.service_id,
+ self.service.copy())
+
+ self.endpoint_id = uuid.uuid4().hex
+ self.endpoint = self.new_endpoint_ref(service_id=self.service_id)
+ self.endpoint['id'] = self.endpoint_id
+ self.endpoint['region_id'] = self.region['id']
+ self.catalog_api.create_endpoint(
+ self.endpoint_id,
+ self.endpoint.copy())
+ # The server adds 'enabled' and defaults to True.
+ self.endpoint['enabled'] = True
+
+ def new_ref(self):
+ """Populates a ref with attributes common to all API entities."""
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+ def new_region_ref(self):
+ ref = self.new_ref()
+ # Region doesn't have name or enabled.
+ del ref['name']
+ del ref['enabled']
+ ref['parent_region_id'] = None
+ return ref
+
+ def new_service_ref(self):
+ ref = self.new_ref()
+ ref['type'] = uuid.uuid4().hex
+ return ref
+
+ def new_endpoint_ref(self, service_id, interface='public', **kwargs):
+ ref = self.new_ref()
+ del ref['enabled'] # enabled is optional
+ ref['interface'] = interface
+ ref['service_id'] = service_id
+ ref['url'] = 'https://' + uuid.uuid4().hex + '.com'
+ ref['region_id'] = self.region_id
+ ref.update(kwargs)
+ return ref
+
+ def new_domain_ref(self):
+ ref = self.new_ref()
+ return ref
+
+ def new_project_ref(self, domain_id, parent_id=None):
+ ref = self.new_ref()
+ ref['domain_id'] = domain_id
+ ref['parent_id'] = parent_id
+ return ref
+
+ def new_user_ref(self, domain_id, project_id=None):
+ ref = self.new_ref()
+ ref['domain_id'] = domain_id
+ ref['email'] = uuid.uuid4().hex
+ ref['password'] = uuid.uuid4().hex
+ if project_id:
+ ref['default_project_id'] = project_id
+ return ref
+
+ def new_group_ref(self, domain_id):
+ ref = self.new_ref()
+ ref['domain_id'] = domain_id
+ return ref
+
+ def new_credential_ref(self, user_id, project_id=None, cred_type=None):
+ ref = dict()
+ ref['id'] = uuid.uuid4().hex
+ ref['user_id'] = user_id
+ if cred_type == 'ec2':
+ ref['type'] = 'ec2'
+ ref['blob'] = {'blah': 'test'}
+ else:
+ ref['type'] = 'cert'
+ ref['blob'] = uuid.uuid4().hex
+ if project_id:
+ ref['project_id'] = project_id
+ return ref
+
+ def new_role_ref(self):
+ ref = self.new_ref()
+ # Roles don't have a description or the enabled flag
+ del ref['description']
+ del ref['enabled']
+ return ref
+
+ def new_policy_ref(self):
+ ref = self.new_ref()
+ ref['blob'] = uuid.uuid4().hex
+ ref['type'] = uuid.uuid4().hex
+ return ref
+
+ def new_trust_ref(self, trustor_user_id, trustee_user_id, project_id=None,
+ impersonation=None, expires=None, role_ids=None,
+ role_names=None, remaining_uses=None,
+ allow_redelegation=False):
+ ref = dict()
+ ref['id'] = uuid.uuid4().hex
+ ref['trustor_user_id'] = trustor_user_id
+ ref['trustee_user_id'] = trustee_user_id
+ ref['impersonation'] = impersonation or False
+ ref['project_id'] = project_id
+ ref['remaining_uses'] = remaining_uses
+ ref['allow_redelegation'] = allow_redelegation
+
+ if isinstance(expires, six.string_types):
+ ref['expires_at'] = expires
+ elif isinstance(expires, dict):
+ ref['expires_at'] = timeutils.strtime(
+ timeutils.utcnow() + datetime.timedelta(**expires),
+ fmt=TIME_FORMAT)
+ elif expires is None:
+ pass
+ else:
+ raise NotImplementedError('Unexpected value for "expires"')
+
+ role_ids = role_ids or []
+ role_names = role_names or []
+ if role_ids or role_names:
+ ref['roles'] = []
+ for role_id in role_ids:
+ ref['roles'].append({'id': role_id})
+ for role_name in role_names:
+ ref['roles'].append({'name': role_name})
+
+ return ref
+
+ def create_new_default_project_for_user(self, user_id, domain_id,
+ enable_project=True):
+ ref = self.new_project_ref(domain_id=domain_id)
+ ref['enabled'] = enable_project
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': user_id},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ return project
+
+ def get_scoped_token(self):
+ """Convenience method so that we can test authenticated requests."""
+ r = self.admin_request(
+ method='POST',
+ path='/v3/auth/tokens',
+ body={
+ 'auth': {
+ 'identity': {
+ 'methods': ['password'],
+ 'password': {
+ 'user': {
+ 'name': self.user['name'],
+ 'password': self.user['password'],
+ 'domain': {
+ 'id': self.user['domain_id']
+ }
+ }
+ }
+ },
+ 'scope': {
+ 'project': {
+ 'id': self.project['id'],
+ }
+ }
+ }
+ })
+ return r.headers.get('X-Subject-Token')
+
+ def get_requested_token(self, auth):
+ """Request the specific token we want."""
+
+ r = self.v3_authenticate_token(auth)
+ return r.headers.get('X-Subject-Token')
+
+ def v3_authenticate_token(self, auth, expected_status=201):
+ return self.admin_request(method='POST',
+ path='/v3/auth/tokens',
+ body=auth,
+ expected_status=expected_status)
+
+ def v3_noauth_request(self, path, **kwargs):
+ # request does not require auth token header
+ path = '/v3' + path
+ return self.admin_request(path=path, **kwargs)
+
+ def v3_request(self, path, **kwargs):
+        # Check whether the caller asked for an unauthenticated request.
+ if kwargs.pop('noauth', None):
+ return self.v3_noauth_request(path, **kwargs)
+
+ # Check if the caller has passed in auth details for
+ # use in requesting the token
+ auth_arg = kwargs.pop('auth', None)
+ if auth_arg:
+ token = self.get_requested_token(auth_arg)
+ else:
+ token = kwargs.pop('token', None)
+ if not token:
+ token = self.get_scoped_token()
+ path = '/v3' + path
+
+ return self.admin_request(path=path, token=token, **kwargs)
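+
+    # Illustrative usage (hypothetical resource path): self.get('/projects',
+    # auth=self.build_authentication_request(...)) first exchanges the auth
+    # payload for a token, then issues GET /v3/projects with that token.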
+
+ def get(self, path, **kwargs):
+ r = self.v3_request(method='GET', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 200)
+ return r
+
+ def head(self, path, **kwargs):
+ r = self.v3_request(method='HEAD', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 204)
+ self.assertEqual('', r.body)
+ return r
+
+ def post(self, path, **kwargs):
+ r = self.v3_request(method='POST', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 201)
+ return r
+
+ def put(self, path, **kwargs):
+ r = self.v3_request(method='PUT', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 204)
+ return r
+
+ def patch(self, path, **kwargs):
+ r = self.v3_request(method='PATCH', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 200)
+ return r
+
+ def delete(self, path, **kwargs):
+ r = self.v3_request(method='DELETE', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 204)
+ return r
+
+ def assertValidErrorResponse(self, r):
+ resp = r.result
+ self.assertIsNotNone(resp.get('error'))
+ self.assertIsNotNone(resp['error'].get('code'))
+ self.assertIsNotNone(resp['error'].get('title'))
+ self.assertIsNotNone(resp['error'].get('message'))
+ self.assertEqual(int(resp['error']['code']), r.status_code)
+
+ def assertValidListLinks(self, links, resource_url=None):
+ self.assertIsNotNone(links)
+ self.assertIsNotNone(links.get('self'))
+ self.assertThat(links['self'], matchers.StartsWith('http://localhost'))
+
+ if resource_url:
+ self.assertThat(links['self'], matchers.EndsWith(resource_url))
+
+ self.assertIn('next', links)
+ if links['next'] is not None:
+ self.assertThat(links['next'],
+ matchers.StartsWith('http://localhost'))
+
+ self.assertIn('previous', links)
+ if links['previous'] is not None:
+ self.assertThat(links['previous'],
+ matchers.StartsWith('http://localhost'))
+
+ def assertValidListResponse(self, resp, key, entity_validator, ref=None,
+ expected_length=None, keys_to_check=None,
+ resource_url=None):
+ """Make assertions common to all API list responses.
+
+        If a reference is provided, its ID will be searched for in the
+ response, and asserted to be equal.
+
+ """
+ entities = resp.result.get(key)
+ self.assertIsNotNone(entities)
+
+ if expected_length is not None:
+ self.assertEqual(expected_length, len(entities))
+ elif ref is not None:
+ # we're at least expecting the ref
+ self.assertNotEmpty(entities)
+
+ # collections should have relational links
+ self.assertValidListLinks(resp.result.get('links'),
+ resource_url=resource_url)
+
+ for entity in entities:
+ self.assertIsNotNone(entity)
+ self.assertValidEntity(entity, keys_to_check=keys_to_check)
+ entity_validator(entity)
+ if ref:
+ entity = [x for x in entities if x['id'] == ref['id']][0]
+ self.assertValidEntity(entity, ref=ref,
+ keys_to_check=keys_to_check)
+ entity_validator(entity, ref)
+ return entities
+
+ def assertValidResponse(self, resp, key, entity_validator, *args,
+ **kwargs):
+ """Make assertions common to all API responses."""
+ entity = resp.result.get(key)
+ self.assertIsNotNone(entity)
+ keys = kwargs.pop('keys_to_check', None)
+ self.assertValidEntity(entity, keys_to_check=keys, *args, **kwargs)
+ entity_validator(entity, *args, **kwargs)
+ return entity
+
+ def assertValidEntity(self, entity, ref=None, keys_to_check=None):
+ """Make assertions common to all API entities.
+
+ If a reference is provided, the entity will also be compared against
+ the reference.
+ """
+ if keys_to_check is not None:
+ keys = keys_to_check
+ else:
+ keys = ['name', 'description', 'enabled']
+
+ for k in ['id'] + keys:
+ msg = '%s unexpectedly None in %s' % (k, entity)
+ self.assertIsNotNone(entity.get(k), msg)
+
+ self.assertIsNotNone(entity.get('links'))
+ self.assertIsNotNone(entity['links'].get('self'))
+ self.assertThat(entity['links']['self'],
+ matchers.StartsWith('http://localhost'))
+ self.assertIn(entity['id'], entity['links']['self'])
+
+ if ref:
+ for k in keys:
+ msg = '%s not equal: %s != %s' % (k, ref[k], entity[k])
+ self.assertEqual(ref[k], entity[k])
+
+ return entity
+
+ def assertDictContainsSubset(self, expected, actual):
+ """"Asserts if dictionary actual is a superset of expected.
+
+ Tests whether the key/value pairs in dictionary actual are a superset
+ of those in expected.
+
+ """
+        for k, v in six.iteritems(expected):
+ self.assertIn(k, actual)
+ if isinstance(v, dict):
+ self.assertDictContainsSubset(v, actual[k])
+ else:
+ self.assertEqual(v, actual[k])
+
+ # auth validation
+
+    def assertValidISO8601ExtendedFormatDatetime(self, dt):
+        try:
+            parsed = timeutils.parse_strtime(dt, fmt=TIME_FORMAT)
+        except Exception:
+            msg = '%s is not a valid ISO 8601 extended format date time.' % dt
+            raise AssertionError(msg)
+        self.assertIsInstance(parsed, datetime.datetime)
+        return parsed
+
+ def assertValidTokenResponse(self, r, user=None):
+ self.assertTrue(r.headers.get('X-Subject-Token'))
+ token = r.result['token']
+
+ self.assertIsNotNone(token.get('expires_at'))
+ expires_at = self.assertValidISO8601ExtendedFormatDatetime(
+ token['expires_at'])
+ self.assertIsNotNone(token.get('issued_at'))
+ issued_at = self.assertValidISO8601ExtendedFormatDatetime(
+ token['issued_at'])
+ self.assertTrue(issued_at < expires_at)
+
+ self.assertIn('user', token)
+ self.assertIn('id', token['user'])
+ self.assertIn('name', token['user'])
+ self.assertIn('domain', token['user'])
+ self.assertIn('id', token['user']['domain'])
+
+ if user is not None:
+ self.assertEqual(user['id'], token['user']['id'])
+ self.assertEqual(user['name'], token['user']['name'])
+ self.assertEqual(user['domain_id'], token['user']['domain']['id'])
+
+ return token
+
+ def assertValidUnscopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidTokenResponse(r, *args, **kwargs)
+
+ self.assertNotIn('roles', token)
+ self.assertNotIn('catalog', token)
+ self.assertNotIn('project', token)
+ self.assertNotIn('domain', token)
+
+ return token
+
+ def assertValidScopedTokenResponse(self, r, *args, **kwargs):
+ require_catalog = kwargs.pop('require_catalog', True)
+ endpoint_filter = kwargs.pop('endpoint_filter', False)
+ ep_filter_assoc = kwargs.pop('ep_filter_assoc', 0)
+ token = self.assertValidTokenResponse(r, *args, **kwargs)
+
+ if require_catalog:
+ endpoint_num = 0
+ self.assertIn('catalog', token)
+
+ if isinstance(token['catalog'], list):
+ # only test JSON
+ for service in token['catalog']:
+ for endpoint in service['endpoints']:
+ self.assertNotIn('enabled', endpoint)
+ self.assertNotIn('legacy_endpoint_id', endpoint)
+ self.assertNotIn('service_id', endpoint)
+ endpoint_num += 1
+
+ # sub test for the OS-EP-FILTER extension enabled
+ if endpoint_filter:
+ self.assertEqual(ep_filter_assoc, endpoint_num)
+ else:
+ self.assertNotIn('catalog', token)
+
+ self.assertIn('roles', token)
+ self.assertTrue(token['roles'])
+ for role in token['roles']:
+ self.assertIn('id', role)
+ self.assertIn('name', role)
+
+ return token
+
+ def assertValidProjectScopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
+
+ self.assertIn('project', token)
+ self.assertIn('id', token['project'])
+ self.assertIn('name', token['project'])
+ self.assertIn('domain', token['project'])
+ self.assertIn('id', token['project']['domain'])
+ self.assertIn('name', token['project']['domain'])
+
+ self.assertEqual(self.role_id, token['roles'][0]['id'])
+
+ return token
+
+ def assertValidProjectTrustScopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidProjectScopedTokenResponse(r, *args, **kwargs)
+
+ trust = token.get('OS-TRUST:trust')
+ self.assertIsNotNone(trust)
+ self.assertIsNotNone(trust.get('id'))
+ self.assertIsInstance(trust.get('impersonation'), bool)
+ self.assertIsNotNone(trust.get('trustor_user'))
+ self.assertIsNotNone(trust.get('trustee_user'))
+ self.assertIsNotNone(trust['trustor_user'].get('id'))
+ self.assertIsNotNone(trust['trustee_user'].get('id'))
+
+ def assertValidDomainScopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
+
+ self.assertIn('domain', token)
+ self.assertIn('id', token['domain'])
+ self.assertIn('name', token['domain'])
+
+ return token
+
+ def assertEqualTokens(self, a, b):
+ """Assert that two tokens are equal.
+
+        Compare two tokens except for their ids and timestamps; the
+        timestamps only need to be close enough, not exactly equal.
+ """
+ def normalize(token):
+ del token['token']['expires_at']
+ del token['token']['issued_at']
+ return token
+
+ a_expires_at = self.assertValidISO8601ExtendedFormatDatetime(
+ a['token']['expires_at'])
+ b_expires_at = self.assertValidISO8601ExtendedFormatDatetime(
+ b['token']['expires_at'])
+ self.assertCloseEnoughForGovernmentWork(a_expires_at, b_expires_at)
+
+ a_issued_at = self.assertValidISO8601ExtendedFormatDatetime(
+ a['token']['issued_at'])
+ b_issued_at = self.assertValidISO8601ExtendedFormatDatetime(
+ b['token']['issued_at'])
+ self.assertCloseEnoughForGovernmentWork(a_issued_at, b_issued_at)
+
+ return self.assertDictEqual(normalize(a), normalize(b))
+
+ # catalog validation
+
+ def assertValidCatalogResponse(self, resp, *args, **kwargs):
+ self.assertEqual(set(['catalog', 'links']), set(resp.json.keys()))
+ self.assertValidCatalog(resp.json['catalog'])
+ self.assertIn('links', resp.json)
+ self.assertIsInstance(resp.json['links'], dict)
+        self.assertEqual(['self'], list(resp.json['links'].keys()))
+ self.assertEqual(
+ 'http://localhost/v3/auth/catalog',
+ resp.json['links']['self'])
+
+ def assertValidCatalog(self, entity):
+ self.assertIsInstance(entity, list)
+ self.assertTrue(len(entity) > 0)
+ for service in entity:
+ self.assertIsNotNone(service.get('id'))
+ self.assertIsNotNone(service.get('name'))
+ self.assertIsNotNone(service.get('type'))
+ self.assertNotIn('enabled', service)
+ self.assertTrue(len(service['endpoints']) > 0)
+ for endpoint in service['endpoints']:
+ self.assertIsNotNone(endpoint.get('id'))
+ self.assertIsNotNone(endpoint.get('interface'))
+ self.assertIsNotNone(endpoint.get('url'))
+ self.assertNotIn('enabled', endpoint)
+ self.assertNotIn('legacy_endpoint_id', endpoint)
+ self.assertNotIn('service_id', endpoint)
+
+ # region validation
+
+ def assertValidRegionListResponse(self, resp, *args, **kwargs):
+ # NOTE(jaypipes): I have to pass in a blank keys_to_check parameter
+ # below otherwise the base assertValidEntity method
+ # tries to find a "name" and an "enabled" key in the
+ # returned ref dicts. The issue is, I don't understand
+ # how the service and endpoint entity assertions below
+ # actually work (they don't raise assertions), since
+ # AFAICT, the service and endpoint tables don't have
+ # a "name" column either... :(
+ return self.assertValidListResponse(
+ resp,
+ 'regions',
+ self.assertValidRegion,
+ keys_to_check=[],
+ *args,
+ **kwargs)
+
+ def assertValidRegionResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'region',
+ self.assertValidRegion,
+ keys_to_check=[],
+ *args,
+ **kwargs)
+
+ def assertValidRegion(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('description'))
+ if ref:
+ self.assertEqual(ref['description'], entity['description'])
+ return entity
+
+ # service validation
+
+ def assertValidServiceListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'services',
+ self.assertValidService,
+ *args,
+ **kwargs)
+
+ def assertValidServiceResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'service',
+ self.assertValidService,
+ *args,
+ **kwargs)
+
+ def assertValidService(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('type'))
+ self.assertIsInstance(entity.get('enabled'), bool)
+ if ref:
+ self.assertEqual(ref['type'], entity['type'])
+ return entity
+
+ # endpoint validation
+
+ def assertValidEndpointListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'endpoints',
+ self.assertValidEndpoint,
+ *args,
+ **kwargs)
+
+ def assertValidEndpointResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'endpoint',
+ self.assertValidEndpoint,
+ *args,
+ **kwargs)
+
+ def assertValidEndpoint(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('interface'))
+ self.assertIsNotNone(entity.get('service_id'))
+ self.assertIsInstance(entity['enabled'], bool)
+
+ # this is intended to be an unexposed implementation detail
+ self.assertNotIn('legacy_endpoint_id', entity)
+
+ if ref:
+ self.assertEqual(ref['interface'], entity['interface'])
+ self.assertEqual(ref['service_id'], entity['service_id'])
+ if ref.get('region') is not None:
+ self.assertEqual(ref['region_id'], entity.get('region_id'))
+
+ return entity
+
+ # domain validation
+
+ def assertValidDomainListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'domains',
+ self.assertValidDomain,
+ *args,
+ **kwargs)
+
+ def assertValidDomainResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'domain',
+ self.assertValidDomain,
+ *args,
+ **kwargs)
+
+ def assertValidDomain(self, entity, ref=None):
+ if ref:
+ pass
+ return entity
+
+ # project validation
+
+ def assertValidProjectListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'projects',
+ self.assertValidProject,
+ *args,
+ **kwargs)
+
+ def assertValidProjectResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'project',
+ self.assertValidProject,
+ *args,
+ **kwargs)
+
+ def assertValidProject(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('domain_id'))
+ if ref:
+ self.assertEqual(ref['domain_id'], entity['domain_id'])
+ return entity
+
+ # user validation
+
+ def assertValidUserListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'users',
+ self.assertValidUser,
+ *args,
+ **kwargs)
+
+ def assertValidUserResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'user',
+ self.assertValidUser,
+ *args,
+ **kwargs)
+
+ def assertValidUser(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('domain_id'))
+ self.assertIsNotNone(entity.get('email'))
+ self.assertIsNone(entity.get('password'))
+ self.assertNotIn('tenantId', entity)
+ if ref:
+ self.assertEqual(ref['domain_id'], entity['domain_id'])
+ self.assertEqual(ref['email'], entity['email'])
+ if 'default_project_id' in ref:
+ self.assertIsNotNone(ref['default_project_id'])
+ self.assertEqual(ref['default_project_id'],
+ entity['default_project_id'])
+ return entity
+
+ # group validation
+
+ def assertValidGroupListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'groups',
+ self.assertValidGroup,
+ *args,
+ **kwargs)
+
+ def assertValidGroupResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'group',
+ self.assertValidGroup,
+ *args,
+ **kwargs)
+
+ def assertValidGroup(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('name'))
+ if ref:
+ self.assertEqual(ref['name'], entity['name'])
+ return entity
+
+ # credential validation
+
+ def assertValidCredentialListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'credentials',
+ self.assertValidCredential,
+ keys_to_check=['blob', 'user_id', 'type'],
+ *args,
+ **kwargs)
+
+ def assertValidCredentialResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'credential',
+ self.assertValidCredential,
+ keys_to_check=['blob', 'user_id', 'type'],
+ *args,
+ **kwargs)
+
+ def assertValidCredential(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('user_id'))
+ self.assertIsNotNone(entity.get('blob'))
+ self.assertIsNotNone(entity.get('type'))
+ if ref:
+ self.assertEqual(ref['user_id'], entity['user_id'])
+ self.assertEqual(ref['blob'], entity['blob'])
+ self.assertEqual(ref['type'], entity['type'])
+ self.assertEqual(ref.get('project_id'), entity.get('project_id'))
+ return entity
+
+ # role validation
+
+ def assertValidRoleListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'roles',
+ self.assertValidRole,
+ keys_to_check=['name'],
+ *args,
+ **kwargs)
+
+ def assertValidRoleResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'role',
+ self.assertValidRole,
+ keys_to_check=['name'],
+ *args,
+ **kwargs)
+
+ def assertValidRole(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('name'))
+ if ref:
+ self.assertEqual(ref['name'], entity['name'])
+ return entity
+
+ # role assignment validation
+
+ def assertValidRoleAssignmentListResponse(self, resp, expected_length=None,
+ resource_url=None):
+ entities = resp.result.get('role_assignments')
+
+ if expected_length:
+ self.assertEqual(expected_length, len(entities))
+
+ # Collections should have relational links
+ self.assertValidListLinks(resp.result.get('links'),
+ resource_url=resource_url)
+
+ for entity in entities:
+ self.assertIsNotNone(entity)
+ self.assertValidRoleAssignment(entity)
+ return entities
+
+ def assertValidRoleAssignment(self, entity, ref=None):
+ # A role should be present
+ self.assertIsNotNone(entity.get('role'))
+ self.assertIsNotNone(entity['role'].get('id'))
+
+ # Only one of user or group should be present
+ if entity.get('user'):
+ self.assertIsNone(entity.get('group'))
+ self.assertIsNotNone(entity['user'].get('id'))
+ else:
+ self.assertIsNotNone(entity.get('group'))
+ self.assertIsNotNone(entity['group'].get('id'))
+
+ # A scope should be present and have only one of domain or project
+ self.assertIsNotNone(entity.get('scope'))
+
+ if entity['scope'].get('project'):
+ self.assertIsNone(entity['scope'].get('domain'))
+ self.assertIsNotNone(entity['scope']['project'].get('id'))
+ else:
+ self.assertIsNotNone(entity['scope'].get('domain'))
+ self.assertIsNotNone(entity['scope']['domain'].get('id'))
+
+ # An assignment link should be present
+ self.assertIsNotNone(entity.get('links'))
+ self.assertIsNotNone(entity['links'].get('assignment'))
+
+ if ref:
+ links = ref.pop('links')
+ try:
+ self.assertDictContainsSubset(ref, entity)
+ self.assertIn(links['assignment'],
+ entity['links']['assignment'])
+ finally:
+ if links:
+ ref['links'] = links
+
+ def assertRoleAssignmentInListResponse(self, resp, ref, expected=1):
+
+ found_count = 0
+ for entity in resp.result.get('role_assignments'):
+ try:
+ self.assertValidRoleAssignment(entity, ref=ref)
+ except Exception:
+ # It doesn't match, so let's go onto the next one
+ pass
+ else:
+ found_count += 1
+ self.assertEqual(expected, found_count)
+
+ def assertRoleAssignmentNotInListResponse(self, resp, ref):
+ self.assertRoleAssignmentInListResponse(resp, ref=ref, expected=0)
+
+ # policy validation
+
+ def assertValidPolicyListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'policies',
+ self.assertValidPolicy,
+ *args,
+ **kwargs)
+
+ def assertValidPolicyResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'policy',
+ self.assertValidPolicy,
+ *args,
+ **kwargs)
+
+ def assertValidPolicy(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('blob'))
+ self.assertIsNotNone(entity.get('type'))
+ if ref:
+ self.assertEqual(ref['blob'], entity['blob'])
+ self.assertEqual(ref['type'], entity['type'])
+ return entity
+
+ # trust validation
+
+ def assertValidTrustListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'trusts',
+ self.assertValidTrustSummary,
+ keys_to_check=['trustor_user_id',
+ 'trustee_user_id',
+ 'impersonation'],
+ *args,
+ **kwargs)
+
+ def assertValidTrustResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'trust',
+ self.assertValidTrust,
+ keys_to_check=['trustor_user_id',
+ 'trustee_user_id',
+ 'impersonation'],
+ *args,
+ **kwargs)
+
+ def assertValidTrustSummary(self, entity, ref=None):
+ return self.assertValidTrust(entity, ref, summary=True)
+
+ def assertValidTrust(self, entity, ref=None, summary=False):
+ self.assertIsNotNone(entity.get('trustor_user_id'))
+ self.assertIsNotNone(entity.get('trustee_user_id'))
+ self.assertIsNotNone(entity.get('impersonation'))
+
+ self.assertIn('expires_at', entity)
+ if entity['expires_at'] is not None:
+ self.assertValidISO8601ExtendedFormatDatetime(entity['expires_at'])
+
+ if summary:
+ # Trust list contains no roles, but getting a specific
+ # trust by ID provides the detailed response containing roles
+ self.assertNotIn('roles', entity)
+ self.assertIn('project_id', entity)
+ else:
+ for role in entity['roles']:
+ self.assertIsNotNone(role)
+ self.assertValidEntity(role, keys_to_check=['name'])
+ self.assertValidRole(role)
+
+ self.assertValidListLinks(entity.get('roles_links'))
+
+ # always disallow role xor project_id (neither or both is allowed)
+ has_roles = bool(entity.get('roles'))
+ has_project = bool(entity.get('project_id'))
+ self.assertFalse(has_roles ^ has_project)
+
+ if ref:
+ self.assertEqual(ref['trustor_user_id'], entity['trustor_user_id'])
+ self.assertEqual(ref['trustee_user_id'], entity['trustee_user_id'])
+ self.assertEqual(ref['project_id'], entity['project_id'])
+ if entity.get('expires_at') or ref.get('expires_at'):
+ entity_exp = self.assertValidISO8601ExtendedFormatDatetime(
+ entity['expires_at'])
+ ref_exp = self.assertValidISO8601ExtendedFormatDatetime(
+ ref['expires_at'])
+ self.assertCloseEnoughForGovernmentWork(entity_exp, ref_exp)
+ else:
+ self.assertEqual(ref.get('expires_at'),
+ entity.get('expires_at'))
+
+ return entity
+
+ def build_external_auth_request(self, remote_user,
+ remote_domain=None, auth_data=None,
+ kerberos=False):
+ context = {'environment': {'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'}}
+ if remote_domain:
+ context['environment']['REMOTE_DOMAIN'] = remote_domain
+ if not auth_data:
+ auth_data = self.build_authentication_request(
+ kerberos=kerberos)['auth']
+ no_context = None
+ auth_info = auth.controllers.AuthInfo.create(no_context, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ return context, auth_info, auth_context
+
+
+class VersionTestCase(RestfulTestCase):
+ def test_get_version(self):
+ pass
+
+
+# NOTE(gyee): test AuthContextMiddleware here instead of test_middleware.py
+# because we need the token
+class AuthContextMiddlewareTestCase(RestfulTestCase):
+ def _mock_request_object(self, token_id):
+
+ class fake_req(object):
+ headers = {middleware.AUTH_TOKEN_HEADER: token_id}
+ environ = {}
+
+ return fake_req()
+
+ def test_auth_context_build_by_middleware(self):
+        # test to make sure AuthContextMiddleware successfully builds the
+        # auth context from the incoming auth token
+ admin_token = self.get_scoped_token()
+ req = self._mock_request_object(admin_token)
+ application = None
+ middleware.AuthContextMiddleware(application).process_request(req)
+ self.assertEqual(
+ self.user['id'],
+ req.environ.get(authorization.AUTH_CONTEXT_ENV)['user_id'])
+
+ def test_auth_context_override(self):
+ overridden_context = 'OVERRIDDEN_CONTEXT'
+ # this token should not be used
+ token = uuid.uuid4().hex
+ req = self._mock_request_object(token)
+ req.environ[authorization.AUTH_CONTEXT_ENV] = overridden_context
+ application = None
+ middleware.AuthContextMiddleware(application).process_request(req)
+        # make sure the overridden context takes precedence
+ self.assertEqual(overridden_context,
+ req.environ.get(authorization.AUTH_CONTEXT_ENV))
+
+ def test_admin_token_auth_context(self):
+ # test to make sure AuthContextMiddleware does not attempt to build
+ # auth context if the incoming auth token is the special admin token
+ req = self._mock_request_object(CONF.admin_token)
+ application = None
+ middleware.AuthContextMiddleware(application).process_request(req)
+ self.assertDictEqual(req.environ.get(authorization.AUTH_CONTEXT_ENV),
+ {})
+
+
+class JsonHomeTestMixin(object):
+ """JSON Home test
+
+    Mix in this class to provide a test for the JSON-Home response for an
+    extension.
+
+    The class using this mixin must set JSON_HOME_DATA to a dict mapping
+    relationship URLs (rels) to the JSON-Home data for each relationship.
+    The rels and associated data must be present in the response.
+
+ """
+ def test_get_json_home(self):
+ resp = self.get('/', convert=False,
+ headers={'Accept': 'application/json-home'})
+ self.assertThat(resp.headers['Content-Type'],
+ matchers.Equals('application/json-home'))
+ resp_data = jsonutils.loads(resp.body)
+
+ # Check that the example relationships are present.
+ for rel in self.JSON_HOME_DATA:
+ self.assertThat(resp_data['resources'][rel],
+ matchers.Equals(self.JSON_HOME_DATA[rel]))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_assignment.py b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
new file mode 100644
index 00000000..add14bfb
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
@@ -0,0 +1,2943 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import random
+import six
+import uuid
+
+from oslo_config import cfg
+
+from keystone.common import controller
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+def _build_role_assignment_query_url(effective=False, **filters):
+    """Build and return a role assignment query URL with provided params.
+
+    Available filters are: domain_id, project_id, user_id, group_id, role_id
+    and inherited_to_projects.
+
+    """
+
+ query_params = '?effective' if effective else ''
+
+ for k, v in six.iteritems(filters):
+ query_params += '?' if not query_params else '&'
+
+ if k == 'inherited_to_projects':
+ query_params += 'scope.OS-INHERIT:inherited_to=projects'
+ else:
+ if k in ['domain_id', 'project_id']:
+ query_params += 'scope.'
+ elif k not in ['user_id', 'group_id', 'role_id']:
+ raise ValueError('Invalid key \'%s\' in provided filters.' % k)
+
+ query_params += '%s=%s' % (k.replace('_', '.'), v)
+
+ return '/role_assignments%s' % query_params
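+
+# One possible result (illustrative ids; filter ordering follows dict
+# iteration order):
+#
+#     _build_role_assignment_query_url(effective=True, user_id='u1',
+#                                      project_id='p1')
+#     => '/role_assignments?effective&user.id=u1&scope.project.id=p1'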
+
+
+def _build_role_assignment_link(**attribs):
+ """Build and return a role assignment link with provided attributes.
+
+ Provided attributes are expected to contain: domain_id or project_id,
+ user_id or group_id, role_id and, optionally, inherited_to_projects.
+
+ """
+
+ if attribs.get('domain_id'):
+ link = '/domains/' + attribs['domain_id']
+ else:
+ link = '/projects/' + attribs['project_id']
+
+ if attribs.get('user_id'):
+ link += '/users/' + attribs['user_id']
+ else:
+ link += '/groups/' + attribs['group_id']
+
+ link += '/roles/' + attribs['role_id']
+
+ if attribs.get('inherited_to_projects'):
+ return '/OS-INHERIT%s/inherited_to_projects' % link
+
+ return link
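+
+# For example (illustrative ids):
+#
+#     _build_role_assignment_link(project_id='p1', user_id='u1',
+#                                 role_id='r1')
+#     => '/projects/p1/users/u1/roles/r1'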
+
+
+def _build_role_assignment_entity(link=None, **attribs):
+ """Build and return a role assignment entity with provided attributes.
+
+ Provided attributes are expected to contain: domain_id or project_id,
+ user_id or group_id, role_id and, optionally, inherited_to_projects.
+
+ """
+
+ entity = {'links': {'assignment': (
+ link or _build_role_assignment_link(**attribs))}}
+
+ if attribs.get('domain_id'):
+ entity['scope'] = {'domain': {'id': attribs['domain_id']}}
+ else:
+ entity['scope'] = {'project': {'id': attribs['project_id']}}
+
+    if attribs.get('user_id'):
+        entity['user'] = {'id': attribs['user_id']}
+
+        if attribs.get('group_id'):
+            entity['links']['membership'] = ('/groups/%s/users/%s' %
+                                             (attribs['group_id'],
+                                              attribs['user_id']))
+    else:
+        entity['group'] = {'id': attribs['group_id']}
+
+ entity['role'] = {'id': attribs['role_id']}
+
+ if attribs.get('inherited_to_projects'):
+ entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
+
+ return entity
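+
+# A sketch of the entity produced for a user assignment on a project
+# (illustrative ids):
+#
+#     {'links': {'assignment': '/projects/p1/users/u1/roles/r1'},
+#      'scope': {'project': {'id': 'p1'}},
+#      'user': {'id': 'u1'},
+#      'role': {'id': 'r1'}}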
+
+
+class AssignmentTestCase(test_v3.RestfulTestCase):
+ """Test domains, projects, roles and role assignments."""
+
+ def setUp(self):
+ super(AssignmentTestCase, self).setUp()
+
+ self.group = self.new_group_ref(
+ domain_id=self.domain_id)
+ self.group = self.identity_api.create_group(self.group)
+ self.group_id = self.group['id']
+
+ self.credential_id = uuid.uuid4().hex
+ self.credential = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential['id'] = self.credential_id
+ self.credential_api.create_credential(
+ self.credential_id,
+ self.credential)
+
+ # Domain CRUD tests
+
+ def test_create_domain(self):
+ """Call ``POST /domains``."""
+ ref = self.new_domain_ref()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ return self.assertValidDomainResponse(r, ref)
+
+ def test_create_domain_case_sensitivity(self):
+ """Call `POST /domains`` twice with upper() and lower() cased name."""
+ ref = self.new_domain_ref()
+
+ # ensure the name is lowercase
+ ref['name'] = ref['name'].lower()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ # ensure the name is uppercase
+ ref['name'] = ref['name'].upper()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ def test_create_domain_400(self):
+ """Call ``POST /domains``."""
+ self.post('/domains', body={'domain': {}}, expected_status=400)
+
+ def test_list_domains(self):
+ """Call ``GET /domains``."""
+ resource_url = '/domains'
+ r = self.get(resource_url)
+ self.assertValidDomainListResponse(r, ref=self.domain,
+ resource_url=resource_url)
+
+ def test_get_domain(self):
+ """Call ``GET /domains/{domain_id}``."""
+ r = self.get('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id})
+ self.assertValidDomainResponse(r, self.domain)
+
+ def test_update_domain(self):
+ """Call ``PATCH /domains/{domain_id}``."""
+ ref = self.new_domain_ref()
+ del ref['id']
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id},
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ def test_disable_domain(self):
+ """Call ``PATCH /domains/{domain_id}`` (set enabled=False)."""
+ # Create a 2nd set of entities in a 2nd domain
+ self.domain2 = self.new_domain_ref()
+ self.resource_api.create_domain(self.domain2['id'], self.domain2)
+
+ self.project2 = self.new_project_ref(
+ domain_id=self.domain2['id'])
+ self.resource_api.create_project(self.project2['id'], self.project2)
+
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain2['id'],
+ project_id=self.project2['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+
+ self.assignment_api.add_user_to_project(self.project2['id'],
+ self.user2['id'])
+
+ # First check a user in that domain can authenticate, via
+        # both v2 and v3
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user2['id'],
+ 'password': self.user2['password']
+ },
+ 'tenantId': self.project2['id']
+ }
+ }
+ self.admin_request(path='/v2.0/tokens', method='POST', body=body)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.project2['id'])
+ self.v3_authenticate_token(auth_data)
+
+ # Now disable the domain
+ self.domain2['enabled'] = False
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain2['id']},
+ body={'domain': {'enabled': False}})
+ self.assertValidDomainResponse(r, self.domain2)
+
+ # Make sure the user can no longer authenticate, via
+ # either API
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user2['id'],
+ 'password': self.user2['password']
+ },
+ 'tenantId': self.project2['id']
+ }
+ }
+ self.admin_request(
+ path='/v2.0/tokens', method='POST', body=body, expected_status=401)
+
+ # Try looking up in v3 by name and id
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.project2['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ auth_data = self.build_authentication_request(
+ username=self.user2['name'],
+ user_domain_id=self.domain2['id'],
+ password=self.user2['password'],
+ project_id=self.project2['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_delete_enabled_domain_fails(self):
+ """Call ``DELETE /domains/{domain_id}`` (when domain enabled)."""
+
+ # Try deleting an enabled domain, which should fail
+ self.delete('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain['id']},
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_delete_domain(self):
+ """Call ``DELETE /domains/{domain_id}``.
+
+ The sample data set up already has a user, group, project
+ and credential that is part of self.domain. Since the user
+ we will authenticate with is in this domain, we create a
+ another set of entities in a second domain. Deleting this
+ second domain should delete all these new entities. In addition,
+ all the entities in the regular self.domain should be unaffected
+ by the delete.
+
+ Test Plan:
+
+ - Create domain2 and a 2nd set of entities
+ - Disable domain2
+ - Delete domain2
+ - Check entities in domain2 have been deleted
+ - Check entities in self.domain are unaffected
+
+ """
+
+ # Create a 2nd set of entities in a 2nd domain
+ self.domain2 = self.new_domain_ref()
+ self.resource_api.create_domain(self.domain2['id'], self.domain2)
+
+ self.project2 = self.new_project_ref(
+ domain_id=self.domain2['id'])
+ self.resource_api.create_project(self.project2['id'], self.project2)
+
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain2['id'],
+ project_id=self.project2['id'])
+ self.user2 = self.identity_api.create_user(self.user2)
+
+ self.group2 = self.new_group_ref(
+ domain_id=self.domain2['id'])
+ self.group2 = self.identity_api.create_group(self.group2)
+
+ self.credential2 = self.new_credential_ref(
+ user_id=self.user2['id'],
+ project_id=self.project2['id'])
+ self.credential_api.create_credential(
+ self.credential2['id'],
+ self.credential2)
+
+ # Now disable the new domain and delete it
+ self.domain2['enabled'] = False
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain2['id']},
+ body={'domain': {'enabled': False}})
+ self.assertValidDomainResponse(r, self.domain2)
+ self.delete('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain2['id']})
+
+ # Check all the domain2 relevant entities are gone
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ self.domain2['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ self.project2['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ self.group2['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ self.user2['id'])
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ self.credential2['id'])
+
+ # ...and that all self.domain entities are still here
+ r = self.resource_api.get_domain(self.domain['id'])
+ self.assertDictEqual(r, self.domain)
+ r = self.resource_api.get_project(self.project['id'])
+ self.assertDictEqual(r, self.project)
+ r = self.identity_api.get_group(self.group['id'])
+ self.assertDictEqual(r, self.group)
+ r = self.identity_api.get_user(self.user['id'])
+ self.user.pop('password')
+ self.assertDictEqual(r, self.user)
+ r = self.credential_api.get_credential(self.credential['id'])
+ self.assertDictEqual(r, self.credential)
+
+ def test_delete_default_domain_fails(self):
+ # Attempting to delete the default domain results in 403 Forbidden.
+
+ # Need to disable it first.
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': CONF.identity.default_domain_id},
+ body={'domain': {'enabled': False}})
+
+ self.delete('/domains/%(domain_id)s' % {
+ 'domain_id': CONF.identity.default_domain_id},
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_delete_new_default_domain_fails(self):
+        # If the default domain ID is changed, deleting the new default
+        # domain results in a 403 Forbidden.
+
+ # Create a new domain that's not the default
+ new_domain = self.new_domain_ref()
+ new_domain_id = new_domain['id']
+ self.resource_api.create_domain(new_domain_id, new_domain)
+
+        # Disable the new domain so we can delete it later.
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': new_domain_id},
+ body={'domain': {'enabled': False}})
+
+ # Change the default domain
+ self.config_fixture.config(group='identity',
+ default_domain_id=new_domain_id)
+
+ # Attempt to delete the new domain
+
+ self.delete('/domains/%(domain_id)s' % {'domain_id': new_domain_id},
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_delete_old_default_domain(self):
+        # If the default domain ID is changed, deleting the old default
+        # domain works.
+
+ # Create a new domain that's not the default
+ new_domain = self.new_domain_ref()
+ new_domain_id = new_domain['id']
+ self.resource_api.create_domain(new_domain_id, new_domain)
+
+ old_default_domain_id = CONF.identity.default_domain_id
+
+ # Disable the default domain so we can delete it later.
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': old_default_domain_id},
+ body={'domain': {'enabled': False}})
+
+ # Change the default domain
+ self.config_fixture.config(group='identity',
+ default_domain_id=new_domain_id)
+
+ # Delete the old default domain
+
+ self.delete(
+ '/domains/%(domain_id)s' % {'domain_id': old_default_domain_id})
+
+ def test_token_revoked_once_domain_disabled(self):
+ """Test token from a disabled domain has been invalidated.
+
+ Test that a token that was valid for an enabled domain
+ becomes invalid once that domain is disabled.
+
+ """
+
+ self.domain = self.new_domain_ref()
+ self.resource_api.create_domain(self.domain['id'], self.domain)
+
+ self.user2 = self.new_user_ref(domain_id=self.domain['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+
+ # build a request body
+ auth_body = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'])
+
+ # sends a request for the user's token
+ token_resp = self.post('/auth/tokens', body=auth_body)
+
+ subject_token = token_resp.headers.get('x-subject-token')
+
+ # validates the returned token and it should be valid.
+ self.head('/auth/tokens',
+ headers={'x-subject-token': subject_token},
+ expected_status=200)
+
+ # now disable the domain
+ self.domain['enabled'] = False
+ url = "/domains/%(domain_id)s" % {'domain_id': self.domain['id']}
+ self.patch(url,
+ body={'domain': {'enabled': False}},
+ expected_status=200)
+
+ # validates the same token again and it should be 'not found'
+ # as the domain has already been disabled.
+ self.head('/auth/tokens',
+ headers={'x-subject-token': subject_token},
+ expected_status=404)
+
+ def test_delete_domain_hierarchy(self):
+ """Call ``DELETE /domains/{domain_id}``."""
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+
+ root_project = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(root_project['id'], root_project)
+
+ leaf_project = self.new_project_ref(
+ domain_id=domain['id'],
+ parent_id=root_project['id'])
+ self.resource_api.create_project(leaf_project['id'], leaf_project)
+
+ # Need to disable it first.
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': domain['id']},
+ body={'domain': {'enabled': False}})
+
+ self.delete(
+ '/domains/%(domain_id)s' % {
+ 'domain_id': domain['id']})
+
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ leaf_project['id'])
+
+ def test_forbid_operations_on_federated_domain(self):
+ """Make sure one cannot operate on federated domain.
+
+ This includes operations like create, update, delete
+ on domain identified by id and name where difference variations of
+ id 'Federated' are used.
+
+ """
+ def create_domains():
+ for variation in ('Federated', 'FEDERATED',
+ 'federated', 'fEderated'):
+ domain = self.new_domain_ref()
+ domain['id'] = variation
+ yield domain
+
+ for domain in create_domains():
+ self.assertRaises(
+ AssertionError, self.assignment_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ AssertionError, self.assignment_api.update_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ exception.DomainNotFound, self.assignment_api.delete_domain,
+ domain['id'])
+
+ # swap 'name' with 'id' and try again, expecting the request to
+ # gracefully fail
+ domain['id'], domain['name'] = domain['name'], domain['id']
+ self.assertRaises(
+ AssertionError, self.assignment_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ AssertionError, self.assignment_api.update_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ exception.DomainNotFound, self.assignment_api.delete_domain,
+ domain['id'])
+
+ def test_forbid_operations_on_defined_federated_domain(self):
+ """Make sure one cannot operate on a user-defined federated domain.
+
+ This includes operations like create, update, delete.
+
+ """
+
+ non_default_name = 'beta_federated_domain'
+ self.config_fixture.config(group='federation',
+ federated_domain_name=non_default_name)
+ domain = self.new_domain_ref()
+ domain['name'] = non_default_name
+ self.assertRaises(AssertionError,
+ self.assignment_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(exception.DomainNotFound,
+ self.assignment_api.delete_domain,
+ domain['id'])
+ self.assertRaises(AssertionError,
+ self.assignment_api.update_domain,
+ domain['id'], domain)
+
+ def test_set_federated_domain_when_config_empty(self):
+ """Make sure we are operable even if config value is not properly
+ set.
+
+ This includes operations like create, update, delete.
+
+ """
+ federated_name = 'Federated'
+ self.config_fixture.config(group='federation',
+ federated_domain_name='')
+ domain = self.new_domain_ref()
+ domain['id'] = federated_name
+ self.assertRaises(AssertionError,
+ self.assignment_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(exception.DomainNotFound,
+ self.assignment_api.delete_domain,
+ domain['id'])
+ self.assertRaises(AssertionError,
+ self.assignment_api.update_domain,
+ domain['id'], domain)
+
+ # swap id with name
+ domain['id'], domain['name'] = domain['name'], domain['id']
+ self.assertRaises(AssertionError,
+ self.assignment_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(exception.DomainNotFound,
+ self.assignment_api.delete_domain,
+ domain['id'])
+ self.assertRaises(AssertionError,
+ self.assignment_api.update_domain,
+ domain['id'], domain)
+
+ # Project CRUD tests
+
+ def test_list_projects(self):
+ """Call ``GET /projects``."""
+ resource_url = '/projects'
+ r = self.get(resource_url)
+ self.assertValidProjectListResponse(r, ref=self.project,
+ resource_url=resource_url)
+
+ def test_create_project(self):
+ """Call ``POST /projects``."""
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/projects',
+ body={'project': ref})
+ self.assertValidProjectResponse(r, ref)
+
+ def test_create_project_400(self):
+ """Call ``POST /projects``."""
+ self.post('/projects', body={'project': {}}, expected_status=400)
+
+ def _create_projects_hierarchy(self, hierarchy_size=1):
+ """Creates a project hierarchy with specified size.
+
+ :param hierarchy_size: the desired hierarchy size, default is 1 -
+ a project with one child.
+
+ :returns projects: a list of the projects in the created hierarchy.
+
+ """
+ resp = self.get(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+
+ projects = [resp.result]
+
+ for i in range(hierarchy_size):
+ new_ref = self.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[i]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+
+ projects.append(resp.result)
+
+ return projects
+
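+    # Note on the shape of _create_projects_hierarchy's return value
+    # (illustrative): each element is a raw response body, i.e.
+    # {'project': {...}}, hence callers index projects[i]['project']['id'].
+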
+ def test_create_hierarchical_project(self):
+ """Call ``POST /projects``."""
+ self._create_projects_hierarchy()
+
+ def test_get_project(self):
+ """Call ``GET /projects/{project_id}``."""
+ r = self.get(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+ self.assertValidProjectResponse(r, self.project)
+
+ def test_get_project_with_parents_as_ids(self):
+ """Call ``GET /projects/{project_id}?parents_as_ids``."""
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ # Query for projects[2] parents_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?parents_as_ids' % {
+ 'project_id': projects[2]['project']['id']})
+
+ self.assertValidProjectResponse(r, projects[2]['project'])
+ parents_as_ids = r.result['project']['parents']
+
+ # Assert parents_as_ids is a structured dictionary correctly
+ # representing the hierarchy. The request was made using projects[2]
+ # id, hence its parents should be projects[1] and projects[0]. It
+ # should have the following structure:
+ # {
+ # projects[1]: {
+ # projects[0]: None
+ # }
+ # }
+ expected_dict = {
+ projects[1]['project']['id']: {
+ projects[0]['project']['id']: None
+ }
+ }
+ self.assertDictEqual(expected_dict, parents_as_ids)
+
+ # Query for projects[0] parents_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?parents_as_ids' % {
+ 'project_id': projects[0]['project']['id']})
+
+ self.assertValidProjectResponse(r, projects[0]['project'])
+ parents_as_ids = r.result['project']['parents']
+
+ # projects[0] has no parents, parents_as_ids must be None
+ self.assertIsNone(parents_as_ids)
+
+ def test_get_project_with_parents_as_list(self):
+ """Call ``GET /projects/{project_id}?parents_as_list``."""
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ r = self.get(
+ '/projects/%(project_id)s?parents_as_list' % {
+ 'project_id': projects[1]['project']['id']})
+
+ self.assertEqual(1, len(r.result['project']['parents']))
+ self.assertValidProjectResponse(r, projects[1]['project'])
+ self.assertIn(projects[0], r.result['project']['parents'])
+ self.assertNotIn(projects[2], r.result['project']['parents'])
+
+ def test_get_project_with_parents_as_list_and_parents_as_ids(self):
+ """Call ``GET /projects/{project_id}?parents_as_list&parents_as_ids``.
+
+ """
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ self.get(
+ '/projects/%(project_id)s?parents_as_list&parents_as_ids' % {
+ 'project_id': projects[1]['project']['id']},
+ expected_status=400)
+
+ def test_get_project_with_subtree_as_ids(self):
+ """Call ``GET /projects/{project_id}?subtree_as_ids``.
+
+ This test creates a more complex hierarchy to test if the structured
+ dictionary returned by using the ``subtree_as_ids`` query param
+ correctly represents the hierarchy.
+
+ The hierarchy contains 5 projects with the following structure::
+
+ +--A--+
+ | |
+ +--B--+ C
+ | |
+ D E
+
+
+ """
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ # Add another child to projects[0] - it will be projects[3]
+ new_ref = self.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[0]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+ projects.append(resp.result)
+
+ # Add another child to projects[1] - it will be projects[4]
+ new_ref = self.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[1]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+ projects.append(resp.result)
+
+ # Query for projects[0] subtree_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_ids' % {
+ 'project_id': projects[0]['project']['id']})
+ self.assertValidProjectResponse(r, projects[0]['project'])
+ subtree_as_ids = r.result['project']['subtree']
+
+ # The subtree hierarchy from projects[0] should have the following
+ # structure:
+ # {
+ # projects[1]: {
+ # projects[2]: None,
+ # projects[4]: None
+ # },
+ # projects[3]: None
+ # }
+ expected_dict = {
+ projects[1]['project']['id']: {
+ projects[2]['project']['id']: None,
+ projects[4]['project']['id']: None
+ },
+ projects[3]['project']['id']: None
+ }
+ self.assertDictEqual(expected_dict, subtree_as_ids)
+
+ # Now query for projects[1] subtree_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_ids' % {
+ 'project_id': projects[1]['project']['id']})
+ self.assertValidProjectResponse(r, projects[1]['project'])
+ subtree_as_ids = r.result['project']['subtree']
+
+ # The subtree hierarchy from projects[1] should have the following
+ # structure:
+ # {
+ # projects[2]: None,
+ # projects[4]: None
+ # }
+ expected_dict = {
+ projects[2]['project']['id']: None,
+ projects[4]['project']['id']: None
+ }
+ self.assertDictEqual(expected_dict, subtree_as_ids)
+
+ # Now query for projects[3] subtree_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_ids' % {
+ 'project_id': projects[3]['project']['id']})
+ self.assertValidProjectResponse(r, projects[3]['project'])
+ subtree_as_ids = r.result['project']['subtree']
+
+ # projects[3] has no subtree, subtree_as_ids must be None
+ self.assertIsNone(subtree_as_ids)
+
+ def test_get_project_with_subtree_as_list(self):
+ """Call ``GET /projects/{project_id}?subtree_as_list``."""
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_list' % {
+ 'project_id': projects[1]['project']['id']})
+
+ self.assertEqual(1, len(r.result['project']['subtree']))
+ self.assertValidProjectResponse(r, projects[1]['project'])
+ self.assertNotIn(projects[0], r.result['project']['subtree'])
+ self.assertIn(projects[2], r.result['project']['subtree'])
+
+ def test_get_project_with_subtree_as_list_and_subtree_as_ids(self):
+ """Call ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids``.
+
+ """
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ self.get(
+ '/projects/%(project_id)s?subtree_as_list&subtree_as_ids' % {
+ 'project_id': projects[1]['project']['id']},
+ expected_status=400)
+
+ def test_update_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ del ref['id']
+ r = self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ body={'project': ref})
+ self.assertValidProjectResponse(r, ref)
+
+ def test_update_project_domain_id(self):
+ """Call ``PATCH /projects/{project_id}`` with domain_id."""
+ project = self.new_project_ref(domain_id=self.domain['id'])
+ self.resource_api.create_project(project['id'], project)
+ project['domain_id'] = CONF.identity.default_domain_id
+ r = self.patch('/projects/%(project_id)s' % {
+ 'project_id': project['id']},
+ body={'project': project},
+ expected_status=exception.ValidationError.code)
+ self.config_fixture.config(domain_id_immutable=False)
+ project['domain_id'] = self.domain['id']
+ r = self.patch('/projects/%(project_id)s' % {
+ 'project_id': project['id']},
+ body={'project': project})
+ self.assertValidProjectResponse(r, project)
+
+ def test_update_project_parent_id(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ leaf_project = projects[1]['project']
+ leaf_project['parent_id'] = None
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': leaf_project['id']},
+ body={'project': leaf_project},
+ expected_status=403)
+
+ def test_disable_leaf_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ leaf_project = projects[1]['project']
+ leaf_project['enabled'] = False
+ r = self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': leaf_project['id']},
+ body={'project': leaf_project})
+ self.assertEqual(
+ leaf_project['enabled'], r.result['project']['enabled'])
+
+ def test_disable_not_leaf_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ root_project = projects[0]['project']
+ root_project['enabled'] = False
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': root_project['id']},
+ body={'project': root_project},
+ expected_status=403)
+
+ def test_delete_project(self):
+        """Call ``DELETE /projects/{project_id}``.
+
+        As well as making sure the delete succeeds, we ensure
+        that any credentials that reference this project are
+        also deleted, while other credentials are unaffected.
+
+ """
+ # First check the credential for this project is present
+ r = self.credential_api.get_credential(self.credential['id'])
+ self.assertDictEqual(r, self.credential)
+ # Create a second credential with a different project
+ self.project2 = self.new_project_ref(
+ domain_id=self.domain['id'])
+ self.resource_api.create_project(self.project2['id'], self.project2)
+ self.credential2 = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project2['id'])
+ self.credential_api.create_credential(
+ self.credential2['id'],
+ self.credential2)
+
+ # Now delete the project
+ self.delete(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+
+ # Deleting the project should have deleted any credentials
+ # that reference this project
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ credential_id=self.credential['id'])
+ # But the credential for project2 is unaffected
+ r = self.credential_api.get_credential(self.credential2['id'])
+ self.assertDictEqual(r, self.credential2)
+
+ def test_delete_not_leaf_project(self):
+ """Call ``DELETE /projects/{project_id}``."""
+ self._create_projects_hierarchy()
+ self.delete(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ expected_status=403)
+
+ # Role CRUD tests
+
+ def test_create_role(self):
+ """Call ``POST /roles``."""
+ ref = self.new_role_ref()
+ r = self.post(
+ '/roles',
+ body={'role': ref})
+ return self.assertValidRoleResponse(r, ref)
+
+ def test_create_role_400(self):
+ """Call ``POST /roles``."""
+ self.post('/roles', body={'role': {}}, expected_status=400)
+
+ def test_list_roles(self):
+ """Call ``GET /roles``."""
+ resource_url = '/roles'
+ r = self.get(resource_url)
+ self.assertValidRoleListResponse(r, ref=self.role,
+ resource_url=resource_url)
+
+ def test_get_role(self):
+ """Call ``GET /roles/{role_id}``."""
+ r = self.get('/roles/%(role_id)s' % {
+ 'role_id': self.role_id})
+ self.assertValidRoleResponse(r, self.role)
+
+ def test_update_role(self):
+ """Call ``PATCH /roles/{role_id}``."""
+ ref = self.new_role_ref()
+ del ref['id']
+ r = self.patch('/roles/%(role_id)s' % {
+ 'role_id': self.role_id},
+ body={'role': ref})
+ self.assertValidRoleResponse(r, ref)
+
+ def test_delete_role(self):
+ """Call ``DELETE /roles/{role_id}``."""
+ self.delete('/roles/%(role_id)s' % {
+ 'role_id': self.role_id})
+
+ # Role Grants tests
+
+ def test_crud_user_project_role_grants(self):
+ collection_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.project['id'],
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
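+        # member_url addresses a single grant (used with PUT/HEAD/DELETE),
+        # while collection_url lists all roles the user has on the project.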
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role,
+ resource_url=collection_url)
+
+        # FIXME(gyee): this test is no longer valid as the user
+        # has no role in the project. Can't get a scoped token
+ # self.delete(member_url)
+ # r = self.get(collection_url)
+ # self.assertValidRoleListResponse(r, expected_length=0)
+ # self.assertIn(collection_url, r.result['links']['self'])
+
+ def test_crud_user_project_role_grants_no_user(self):
+ """Grant role on a project to a user that doesn't exist, 404 result.
+
+        When granting a role on a project to a user that doesn't exist, the
+        server returns 404 Not Found for the user.
+
+ """
+
+ user_id = uuid.uuid4().hex
+
+ collection_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.project['id'], 'user_id': user_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url, expected_status=404)
+
+ def test_crud_user_domain_role_grants(self):
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role,
+ resource_url=collection_url)
+
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0,
+ resource_url=collection_url)
+
+ def test_crud_user_domain_role_grants_no_user(self):
+ """Grant role on a domain to a user that doesn't exist, 404 result.
+
+        When granting a role on a domain to a user that doesn't exist, the
+        server returns 404 Not Found for the user.
+
+ """
+
+ user_id = uuid.uuid4().hex
+
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id, 'user_id': user_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url, expected_status=404)
+
+ def test_crud_group_project_role_grants(self):
+ collection_url = (
+ '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
+ 'project_id': self.project_id,
+ 'group_id': self.group_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role,
+ resource_url=collection_url)
+
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0,
+ resource_url=collection_url)
+
+ def test_crud_group_project_role_grants_no_group(self):
+ """Grant role on a project to a group that doesn't exist, 404 result.
+
+        When granting a role on a project to a group that doesn't exist, the
+ server returns 404 Not Found for the group.
+
+ """
+
+ group_id = uuid.uuid4().hex
+
+ collection_url = (
+ '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
+ 'project_id': self.project_id,
+ 'group_id': group_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url, expected_status=404)
+
+ def test_crud_group_domain_role_grants(self):
+ collection_url = (
+ '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'group_id': self.group_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role,
+ resource_url=collection_url)
+
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0,
+ resource_url=collection_url)
+
+ def test_crud_group_domain_role_grants_no_group(self):
+ """Grant role on a domain to a group that doesn't exist, 404 result.
+
+        When granting a role on a domain to a group that doesn't exist, the
+        server returns 404 Not Found for the group.
+
+ """
+
+ group_id = uuid.uuid4().hex
+
+ collection_url = (
+ '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'group_id': group_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url, expected_status=404)
+
+ def _create_new_user_and_assign_role_on_project(self):
+ """Create a new user and assign user a role on a project."""
+ # Create a new user
+ new_user = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = self.identity_api.create_user(new_user)
+ # Assign the user a role on the project
+ collection_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.project_id,
+ 'user_id': user_ref['id']})
+ member_url = ('%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id})
+ self.put(member_url, expected_status=204)
+ # Check the user has the role assigned
+ self.head(member_url, expected_status=204)
+ return member_url, user_ref
+
+ def test_delete_user_before_removing_role_assignment_succeeds(self):
+ """Call ``DELETE`` on the user before the role assignment."""
+ member_url, user = self._create_new_user_and_assign_role_on_project()
+ # Delete the user from identity backend
+ self.identity_api.driver.delete_user(user['id'])
+ # Clean up the role assignment
+ self.delete(member_url, expected_status=204)
+ # Make sure the role is gone
+ self.head(member_url, expected_status=404)
+
+ def test_delete_user_and_check_role_assignment_fails(self):
+ """Call ``DELETE`` on the user and check the role assignment."""
+ member_url, user = self._create_new_user_and_assign_role_on_project()
+ # Delete the user from identity backend
+ self.identity_api.delete_user(user['id'])
+ # We should get a 404 when looking for the user in the identity
+ # backend because we're not performing a delete operation on the role.
+ self.head(member_url, expected_status=404)
+
+ def test_token_revoked_once_group_role_grant_revoked(self):
+        """Test that a token is revoked when a group role grant is revoked.
+
+ When a role granted to a group is revoked for a given scope,
+ all tokens related to this scope and belonging to one of the members
+ of this group should be revoked.
+
+        The revocation should happen regardless of the presence
+        of the revoke API.
+ """
+ # creates grant from group on project.
+ self.assignment_api.create_grant(role_id=self.role['id'],
+ project_id=self.project['id'],
+ group_id=self.group['id'])
+
+ # adds user to the group.
+ self.identity_api.add_user_to_group(user_id=self.user['id'],
+ group_id=self.group['id'])
+
+ # creates a token for the user
+ auth_body = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ token_resp = self.post('/auth/tokens', body=auth_body)
+ token = token_resp.headers.get('x-subject-token')
+
+ # validates the returned token; it should be valid.
+ self.head('/auth/tokens',
+ headers={'x-subject-token': token},
+ expected_status=200)
+
+ # revokes the grant from group on project.
+ self.assignment_api.delete_grant(role_id=self.role['id'],
+ project_id=self.project['id'],
+ group_id=self.group['id'])
+
+        # validates the same token again; it should no longer be valid.
+ self.head('/auth/tokens',
+ headers={'x-subject-token': token},
+ expected_status=404)
+
+ # Role Assignments tests
+
+ def test_get_role_assignments(self):
+ """Call ``GET /role_assignments``.
+
+        The sample data set up already has a user, group and project
+        that are part of self.domain. We use these plus a new user
+ we create as our data set, making sure we ignore any
+ role assignments that are already in existence.
+
+ Since we don't yet support a first class entity for role
+ assignments, we are only testing the LIST API. To create
+ and delete the role assignments we use the old grant APIs.
+
+ Test Plan:
+
+ - Create extra user for tests
+ - Get a list of all existing role assignments
+ - Add a new assignment for each of the four combinations, i.e.
+ group+domain, user+domain, group+project, user+project, using
+ the same role each time
+ - Get a new list of all role assignments, checking these four new
+ ones have been added
+ - Then delete the four we added
+ - Get a new list of all role assignments, checking the four have
+ been removed
+
+ """
+
+ # Since the default fixtures already assign some roles to the
+ # user it creates, we also need a new user that will not have any
+ # existing assignments
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user1 = self.identity_api.create_user(self.user1)
+
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ resource_url=collection_url)
+ existing_assignments = len(r.result.get('role_assignments'))
+
+ # Now add one of each of the four types of assignment, making sure
+ # that we get them all back.
+ gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gd_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 1,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+
+ ud_entity = _build_role_assignment_entity(domain_id=self.domain_id,
+ user_id=self.user1['id'],
+ role_id=self.role_id)
+ self.put(ud_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 2,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+
+ gp_entity = _build_role_assignment_entity(project_id=self.project_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gp_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 3,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, gp_entity)
+
+ up_entity = _build_role_assignment_entity(project_id=self.project_id,
+ user_id=self.user1['id'],
+ role_id=self.role_id)
+ self.put(up_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 4,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+
+ # Now delete the four we added and make sure they are removed
+ # from the collection.
+
+ self.delete(gd_entity['links']['assignment'])
+ self.delete(ud_entity['links']['assignment'])
+ self.delete(gp_entity['links']['assignment'])
+ self.delete(up_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments,
+ resource_url=collection_url)
+ self.assertRoleAssignmentNotInListResponse(r, gd_entity)
+ self.assertRoleAssignmentNotInListResponse(r, ud_entity)
+ self.assertRoleAssignmentNotInListResponse(r, gp_entity)
+ self.assertRoleAssignmentNotInListResponse(r, up_entity)
+
+ def test_get_effective_role_assignments(self):
+ """Call ``GET /role_assignments?effective``.
+
+ Test Plan:
+
+        - Create two extra users for tests
+ - Add these users to a group
+ - Add a role assignment for the group on a domain
+ - Get a list of all role assignments, checking one has been added
+ - Then get a list of all effective role assignments - the group
+ assignment should have turned into assignments on the domain
+ for each of the group members.
+
+ """
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user1['password']
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+ self.identity_api.add_user_to_group(self.user1['id'], self.group['id'])
+ self.identity_api.add_user_to_group(self.user2['id'], self.group['id'])
+
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ resource_url=collection_url)
+ existing_assignments = len(r.result.get('role_assignments'))
+
+ gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gd_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 1,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+
+ # Now re-read the collection asking for effective roles - this
+ # should mean the group assignment is translated into the two
+ # member user assignments
+ collection_url = '/role_assignments?effective'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 2,
+ resource_url=collection_url)
+ ud_entity = _build_role_assignment_entity(
+ link=gd_entity['links']['assignment'], domain_id=self.domain_id,
+ user_id=self.user1['id'], role_id=self.role_id)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+ ud_entity = _build_role_assignment_entity(
+ link=gd_entity['links']['assignment'], domain_id=self.domain_id,
+ user_id=self.user2['id'], role_id=self.role_id)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+
+ def test_check_effective_values_for_role_assignments(self):
+ """Call ``GET /role_assignments?effective=value``.
+
+ Check the various ways of specifying the 'effective'
+ query parameter. If the 'effective' query parameter
+ is included then this should always be treated as meaning 'True'
+ unless it is specified as:
+
+ {url}?effective=0
+
+ This is by design to match the agreed way of handling
+ policy checking on query/filter parameters.
+
+ Test Plan:
+
+        - Create two extra users for tests
+ - Add these users to a group
+ - Add a role assignment for the group on a domain
+ - Get a list of all role assignments, checking one has been added
+        - Then issue various requests with different ways of defining
+ the 'effective' query parameter. As we have tested the
+ correctness of the data coming back when we get effective roles
+ in other tests, here we just use the count of entities to
+ know if we are getting effective roles or not
+
+ """
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user1['password']
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+ self.identity_api.add_user_to_group(self.user1['id'], self.group['id'])
+ self.identity_api.add_user_to_group(self.user2['id'], self.group['id'])
+
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ resource_url=collection_url)
+ existing_assignments = len(r.result.get('role_assignments'))
+
+ gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gd_entity['links']['assignment'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 1,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+
+ # Now re-read the collection asking for effective roles,
+ # using the most common way of defining "effective'. This
+ # should mean the group assignment is translated into the two
+ # member user assignments
+ collection_url = '/role_assignments?effective'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 2,
+ resource_url=collection_url)
+ # Now set 'effective' to false explicitly - should get
+ # back the regular roles
+ collection_url = '/role_assignments?effective=0'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 1,
+ resource_url=collection_url)
+        # Now try setting 'effective' to 'False' explicitly - this is
+ # NOT supported as a way of setting a query or filter
+ # parameter to false by design. Hence we should get back
+ # effective roles.
+ collection_url = '/role_assignments?effective=False'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 2,
+ resource_url=collection_url)
+ # Now set 'effective' to True explicitly
+ collection_url = '/role_assignments?effective=True'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r,
+ expected_length=existing_assignments + 2,
+ resource_url=collection_url)
+
+ def test_filtered_role_assignments(self):
+ """Call ``GET /role_assignments?filters``.
+
+ Test Plan:
+
+ - Create extra users, group, role and project for tests
+ - Make the following assignments:
+ Give group1, role1 on project1 and domain
+ Give user1, role2 on project1 and domain
+ Make User1 a member of Group1
+ - Test a series of single filter list calls, checking that
+ the correct results are obtained
+ - Test a multi-filtered list call
+ - Test listing all effective roles for a given user
+ - Test the equivalent of the list of roles in a project scoped
+ token (all effective roles for a user on a project)
+
+ """
+
+ # Since the default fixtures already assign some roles to the
+ # user it creates, we also need a new user that will not have any
+ # existing assignments
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user1['password']
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+ self.group1 = self.new_group_ref(
+ domain_id=self.domain['id'])
+ self.group1 = self.identity_api.create_group(self.group1)
+ self.identity_api.add_user_to_group(self.user1['id'],
+ self.group1['id'])
+ self.identity_api.add_user_to_group(self.user2['id'],
+ self.group1['id'])
+ self.project1 = self.new_project_ref(
+ domain_id=self.domain['id'])
+ self.resource_api.create_project(self.project1['id'], self.project1)
+ self.role1 = self.new_role_ref()
+ self.role_api.create_role(self.role1['id'], self.role1)
+ self.role2 = self.new_role_ref()
+ self.role_api.create_role(self.role2['id'], self.role2)
+
+ # Now add one of each of the four types of assignment
+
+ gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ self.put(gd_entity['links']['assignment'])
+
+ ud_entity = _build_role_assignment_entity(domain_id=self.domain_id,
+ user_id=self.user1['id'],
+ role_id=self.role2['id'])
+ self.put(ud_entity['links']['assignment'])
+
+ gp_entity = _build_role_assignment_entity(
+ project_id=self.project1['id'], group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ self.put(gp_entity['links']['assignment'])
+
+ up_entity = _build_role_assignment_entity(
+ project_id=self.project1['id'], user_id=self.user1['id'],
+ role_id=self.role2['id'])
+ self.put(up_entity['links']['assignment'])
+
+ # Now list by various filters to make sure we get back the right ones
+
+ collection_url = ('/role_assignments?scope.project.id=%s' %
+ self.project1['id'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+ self.assertRoleAssignmentInListResponse(r, gp_entity)
+
+ collection_url = ('/role_assignments?scope.domain.id=%s' %
+ self.domain['id'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+
+ collection_url = '/role_assignments?user.id=%s' % self.user1['id']
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+
+ collection_url = '/role_assignments?group.id=%s' % self.group1['id']
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+ self.assertRoleAssignmentInListResponse(r, gp_entity)
+
+ collection_url = '/role_assignments?role.id=%s' % self.role1['id']
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+ self.assertRoleAssignmentInListResponse(r, gp_entity)
+
+        # Let's try combining two filters together....
+
+ collection_url = (
+ '/role_assignments?user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': self.user1['id'],
+ 'project_id': self.project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=1,
+ resource_url=collection_url)
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+
+ # Now for a harder one - filter for user with effective
+        # roles - this should return role assignments that were directly
+        # assigned as well as those gained by virtue of group membership
+
+ collection_url = ('/role_assignments?effective&user.id=%s' %
+ self.user1['id'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=4,
+ resource_url=collection_url)
+ # Should have the two direct roles...
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+ # ...and the two via group membership...
+ gp1_link = _build_role_assignment_link(project_id=self.project1['id'],
+ group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ gd1_link = _build_role_assignment_link(domain_id=self.domain_id,
+ group_id=self.group1['id'],
+ role_id=self.role1['id'])
+
+ up1_entity = _build_role_assignment_entity(
+ link=gp1_link, project_id=self.project1['id'],
+ user_id=self.user1['id'], role_id=self.role1['id'])
+ ud1_entity = _build_role_assignment_entity(
+ link=gd1_link, domain_id=self.domain_id, user_id=self.user1['id'],
+ role_id=self.role1['id'])
+ self.assertRoleAssignmentInListResponse(r, up1_entity)
+ self.assertRoleAssignmentInListResponse(r, ud1_entity)
+
+ # ...and for the grand-daddy of them all, simulate the request
+ # that would generate the list of effective roles in a project
+ # scoped token.
+
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': self.user1['id'],
+ 'project_id': self.project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ # Should have one direct role and one from group membership...
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+ self.assertRoleAssignmentInListResponse(r, up1_entity)
+
+
+class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase):
+ """Base class for testing /v3/role_assignments API behavior."""
+
+ MAX_HIERARCHY_BREADTH = 3
+ MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1
+
+ def load_sample_data(self):
+        """Creates sample data to be used in tests.
+
+        The created data are i) a role and ii) a domain containing a project
+        hierarchy and 3 users within 3 groups.
+
+ """
+ def create_project_hierarchy(parent_id, depth):
+ "Creates a random project hierarchy."
+ if depth == 0:
+ return
+
+ breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH)
+
+ subprojects = []
+ for i in range(breadth):
+ subprojects.append(self.new_project_ref(
+ domain_id=self.domain_id, parent_id=parent_id))
+ self.assignment_api.create_project(subprojects[-1]['id'],
+ subprojects[-1])
+
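+            # Recurse into a single randomly chosen subproject, so only one
+            # random path in the tree reaches the requested depth.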
+ new_parent = subprojects[random.randint(0, breadth - 1)]
+ create_project_hierarchy(new_parent['id'], depth - 1)
+
+ super(RoleAssignmentBaseTestCase, self).load_sample_data()
+
+ # Create a domain
+ self.domain = self.new_domain_ref()
+ self.domain_id = self.domain['id']
+ self.assignment_api.create_domain(self.domain_id, self.domain)
+
+ # Create a project hierarchy
+ self.project = self.new_project_ref(domain_id=self.domain_id)
+ self.project_id = self.project['id']
+ self.assignment_api.create_project(self.project_id, self.project)
+
+ # Create a random project hierarchy
+ create_project_hierarchy(self.project_id,
+ random.randint(1, self.MAX_HIERARCHY_DEPTH))
+
+ # Create 3 users
+ self.user_ids = []
+ for i in range(3):
+ user = self.new_user_ref(domain_id=self.domain_id)
+ user = self.identity_api.create_user(user)
+ self.user_ids.append(user['id'])
+
+ # Create 3 groups
+ self.group_ids = []
+ for i in range(3):
+ group = self.new_group_ref(domain_id=self.domain_id)
+ group = self.identity_api.create_group(group)
+ self.group_ids.append(group['id'])
+
+ # Put 2 members on each group
+ self.identity_api.add_user_to_group(user_id=self.user_ids[i],
+ group_id=group['id'])
+ self.identity_api.add_user_to_group(user_id=self.user_ids[i % 2],
+ group_id=group['id'])
+
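+        # Note: self.role_id here still refers to the role created by the
+        # parent class's sample data; it is replaced by a new role below.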
+ self.assignment_api.create_grant(user_id=self.user_id,
+ project_id=self.project_id,
+ role_id=self.role_id)
+
+ # Create a role
+ self.role = self.new_role_ref()
+ self.role_id = self.role['id']
+ self.assignment_api.create_role(self.role_id, self.role)
+
+ # Set default user and group to be used on tests
+ self.default_user_id = self.user_ids[0]
+ self.default_group_id = self.group_ids[0]
+
+ def get_role_assignments(self, expected_status=200, **filters):
+        """Returns the role assignment API query result and the queried URL.
+
+        Calls GET /v3/role_assignments?<params> and returns its result, where
+        <params> is the HTTP query-parameter form of the effective option
+        plus the filters, if provided. The queried URL is returned as well.
+
+        :returns: a tuple containing the list role assignments API response
+                  and the queried URL.
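+
+        A minimal usage sketch (using names from this test class)::
+
+            response, query_url = self.get_role_assignments(
+                user_id=self.default_user_id)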
+
+ """
+
+ query_url = self._get_role_assignments_query_url(**filters)
+ response = self.get(query_url, expected_status=expected_status)
+
+ return (response, query_url)
+
+ def _get_role_assignments_query_url(self, **filters):
+ """Returns non-effective role assignments query URL from given filters.
+
+ :param filters: query parameters are created with the provided filters
+                        on role assignment attributes. Valid filters are:
+ role_id, domain_id, project_id, group_id, user_id and
+ inherited_to_projects.
+
+ :returns: role assignments query URL.
+
+ """
+ return _build_role_assignment_query_url(**filters)
+
+
+class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
+ """Class for testing invalid query params on /v3/role_assignments API.
+
+    Querying domain and project, or user and group results in an HTTP 400,
+    since a role assignment must contain only a single pair of (actor,
+    target). In addition, since filtering on role assignments applies only
+    to the final result, effective mode cannot be combined with i) group or
+    ii) domain and inherited, because it would always result in an empty
+    list.
+
+ """
+
+ def test_get_role_assignments_by_domain_and_project(self):
+ self.get_role_assignments(domain_id=self.domain_id,
+ project_id=self.project_id,
+ expected_status=400)
+
+ def test_get_role_assignments_by_user_and_group(self):
+ self.get_role_assignments(user_id=self.default_user_id,
+ group_id=self.default_group_id,
+ expected_status=400)
+
+ def test_get_role_assignments_by_effective_and_inherited(self):
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ self.get_role_assignments(domain_id=self.domain_id, effective=True,
+ inherited_to_projects=True,
+ expected_status=400)
+
+ def test_get_role_assignments_by_effective_and_group(self):
+ self.get_role_assignments(effective=True,
+ group_id=self.default_group_id,
+ expected_status=400)
+
+
+class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
+ """Class for testing direct assignments on /v3/role_assignments API.
+
+    Direct assignments on a domain or project take effect on them directly,
+    instead of on their project hierarchy, i.e. they are non-inherited. In
+    addition, direct group assignments are not expanded to the group's users.
+
+    Tests in this class make assertions on the representation and API
+    filtering of direct assignments.
+
+ """
+
+ def _test_get_role_assignments(self, **filters):
+ """Generic filtering test method.
+
+ According to the provided filters, this method:
+ - creates a new role assignment;
+        - asserts that the list role assignments API responds correctly;
+ - deletes the created role assignment.
+
+ :param filters: filters to be considered when listing role assignments.
+ Valid filters are: role_id, domain_id, project_id,
+ group_id, user_id and inherited_to_projects.
+
+ """
+
+ # Fills default assignment with provided filters
+ test_assignment = self._set_default_assignment_attributes(**filters)
+
+ # Create new role assignment for this test
+ self.assignment_api.create_grant(**test_assignment)
+
+ # Get expected role assignments
+ expected_assignments = self._list_expected_role_assignments(
+ **test_assignment)
+
+ # Get role assignments from API
+ response, query_url = self.get_role_assignments(**test_assignment)
+ self.assertValidRoleAssignmentListResponse(response,
+ resource_url=query_url)
+ self.assertEqual(len(expected_assignments),
+ len(response.result.get('role_assignments')))
+
+ # Assert that expected role assignments were returned by the API call
+ for assignment in expected_assignments:
+ self.assertRoleAssignmentInListResponse(response, assignment)
+
+ # Delete created role assignment
+ self.assignment_api.delete_grant(**test_assignment)
+
+ def _set_default_assignment_attributes(self, **attribs):
+        """Inserts default values for missing role assignment attributes.
+
+ If no actor, target or role are provided, they will default to values
+ from sample data.
+
+ :param attribs: info from a role assignment entity. Valid attributes
+ are: role_id, domain_id, project_id, group_id, user_id
+ and inherited_to_projects.
+
+ """
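+        # Default the target to the sample project when neither a domain
+        # nor a project was given.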
+ if not any(target in attribs
+                   for target in ('domain_id', 'project_id')):
+ attribs['project_id'] = self.project_id
+
+ if not any(actor in attribs for actor in ('user_id', 'group_id')):
+ attribs['user_id'] = self.default_user_id
+
+ if 'role_id' not in attribs:
+ attribs['role_id'] = self.role_id
+
+ return attribs
+
+ def _list_expected_role_assignments(self, **filters):
+ """Given the filters, it returns expected direct role assignments.
+
+ :param filters: filters that will be considered when listing role
+ assignments. Valid filters are: role_id, domain_id,
+ project_id, group_id, user_id and
+ inherited_to_projects.
+
+ :returns: the list of the expected role assignments.
+
+ """
+ return [_build_role_assignment_entity(**filters)]
+
+ # Test cases below call the generic test method, providing different filter
+ # combinations. Filters are provided as specified in the method name, after
+ # 'by'. For example, test_get_role_assignments_by_project_user_and_role
+ # calls the generic test method with project_id, user_id and role_id.
+
+ def test_get_role_assignments_by_domain(self, **filters):
+ self._test_get_role_assignments(domain_id=self.domain_id, **filters)
+
+ def test_get_role_assignments_by_project(self, **filters):
+ self._test_get_role_assignments(project_id=self.project_id, **filters)
+
+ def test_get_role_assignments_by_user(self, **filters):
+ self._test_get_role_assignments(user_id=self.default_user_id,
+ **filters)
+
+ def test_get_role_assignments_by_group(self, **filters):
+ self._test_get_role_assignments(group_id=self.default_group_id,
+ **filters)
+
+ def test_get_role_assignments_by_role(self, **filters):
+ self._test_get_role_assignments(role_id=self.role_id, **filters)
+
+ def test_get_role_assignments_by_domain_and_user(self, **filters):
+ self.test_get_role_assignments_by_domain(user_id=self.default_user_id,
+ **filters)
+
+ def test_get_role_assignments_by_domain_and_group(self, **filters):
+ self.test_get_role_assignments_by_domain(
+ group_id=self.default_group_id, **filters)
+
+ def test_get_role_assignments_by_project_and_user(self, **filters):
+ self.test_get_role_assignments_by_project(user_id=self.default_user_id,
+ **filters)
+
+ def test_get_role_assignments_by_project_and_group(self, **filters):
+ self.test_get_role_assignments_by_project(
+ group_id=self.default_group_id, **filters)
+
+ def test_get_role_assignments_by_domain_user_and_role(self, **filters):
+ self.test_get_role_assignments_by_domain_and_user(role_id=self.role_id,
+ **filters)
+
+ def test_get_role_assignments_by_domain_group_and_role(self, **filters):
+ self.test_get_role_assignments_by_domain_and_group(
+ role_id=self.role_id, **filters)
+
+ def test_get_role_assignments_by_project_user_and_role(self, **filters):
+ self.test_get_role_assignments_by_project_and_user(
+ role_id=self.role_id, **filters)
+
+ def test_get_role_assignments_by_project_group_and_role(self, **filters):
+ self.test_get_role_assignments_by_project_and_group(
+ role_id=self.role_id, **filters)
+
+
+class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase):
+ """Class for testing inherited assignments on /v3/role_assignments API.
+
+ Inherited assignments on a domain or project have no effect on them
+ directly, but on the projects under them instead.
+
+    Tests in this class do not make assertions on the effect of inherited
+    assignments, but on their representation and API filtering.
+
+ """
+
+ def config_overrides(self):
+        super(RoleAssignmentInheritedTestCase, self).config_overrides()
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ def _test_get_role_assignments(self, **filters):
+        """Adds inherited_to_projects filter to expected entity in tests."""
+ super(RoleAssignmentInheritedTestCase,
+ self)._test_get_role_assignments(inherited_to_projects=True,
+ **filters)
+
+
+class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
+ """Class for testing inheritance effects on /v3/role_assignments API.
+
+ Inherited assignments on a domain or project have no effect on them
+ directly, but on the projects under them instead.
+
+    Tests in this class make assertions on the effect of inherited assignments
+ and API filtering.
+
+ """
+
+ def _get_role_assignments_query_url(self, **filters):
+ """Returns effective role assignments query URL from given filters.
+
+        For test methods in this class, effective will always be true. Since
+        in effective mode inherited_to_projects, group_id, domain_id and
+        project_id are always ignored, they are dropped from the provided
+        filters.
+
+ :param filters: query parameters are created with the provided filters.
+ Valid filters are: role_id, domain_id, project_id,
+ group_id, user_id and inherited_to_projects.
+
+ :returns: role assignments query URL.
+
+ """
+ query_filters = filters.copy()
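+        # inherited_to_projects is always set by these tests, so it is
+        # popped unconditionally; the scope and group filters may or may
+        # not be present.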
+ query_filters.pop('inherited_to_projects')
+
+ query_filters.pop('group_id', None)
+ query_filters.pop('domain_id', None)
+ query_filters.pop('project_id', None)
+
+ return _build_role_assignment_query_url(effective=True,
+ **query_filters)
+
+ def _list_expected_role_assignments(self, **filters):
+        """Given the filters, it returns expected effective role assignments.
+
+ :param filters: filters that will be considered when listing role
+ assignments. Valid filters are: role_id, domain_id,
+ project_id, group_id, user_id and
+ inherited_to_projects.
+
+ :returns: the list of the expected role assignments.
+
+ """
+ # Get assignment link, to be put on 'links': {'assignment': link}
+ assignment_link = _build_role_assignment_link(**filters)
+
+ # Expand group membership
+ user_ids = [None]
+ if filters.get('group_id'):
+ user_ids = [user['id'] for user in
+ self.identity_api.list_users_in_group(
+ filters['group_id'])]
+ else:
+ user_ids = [self.default_user_id]
+
+ # Expand role inheritance
+ project_ids = [None]
+ if filters.get('domain_id'):
+ project_ids = [project['id'] for project in
+ self.assignment_api.list_projects_in_domain(
+ filters.pop('domain_id'))]
+ else:
+ project_ids = [project['id'] for project in
+ self.assignment_api.list_projects_in_subtree(
+ self.project_id)]
+
+ # Compute expected role assignments
+ assignments = []
+ for project_id in project_ids:
+ filters['project_id'] = project_id
+ for user_id in user_ids:
+ filters['user_id'] = user_id
+ assignments.append(_build_role_assignment_entity(
+ link=assignment_link, **filters))
+
+ return assignments
+
+
+class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
+    """Test inheritance CRUD and its effects."""
+
+ def config_overrides(self):
+ super(AssignmentInheritanceTestCase, self).config_overrides()
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ def test_get_token_from_inherited_user_domain_role_grants(self):
+ # Create a new user to ensure that no grant is loaded from sample data
+ user = self.new_user_ref(domain_id=self.domain_id)
+ password = user['password']
+ user = self.identity_api.create_user(user)
+ user['password'] = password
+
+ # Define domain and project authentication data
+ domain_auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ domain_id=self.domain_id)
+ project_auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ project_id=self.project_id)
+
+ # Check the user cannot get a domain nor a project token
+ self.v3_authenticate_token(domain_auth_data, expected_status=401)
+ self.v3_authenticate_token(project_auth_data, expected_status=401)
+
+ # Grant non-inherited role for user on domain
+ non_inher_ud_link = _build_role_assignment_link(
+ domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
+ self.put(non_inher_ud_link)
+
+ # Check the user can get only a domain token
+ self.v3_authenticate_token(domain_auth_data)
+ self.v3_authenticate_token(project_auth_data, expected_status=401)
+
+ # Create inherited role
+ inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
+ self.role_api.create_role(inherited_role['id'], inherited_role)
+
+ # Grant inherited role for user on domain
+ inher_ud_link = _build_role_assignment_link(
+ domain_id=self.domain_id, user_id=user['id'],
+ role_id=inherited_role['id'], inherited_to_projects=True)
+ self.put(inher_ud_link)
+
+ # Check the user can get both a domain and a project token
+ self.v3_authenticate_token(domain_auth_data)
+ self.v3_authenticate_token(project_auth_data)
+
+ # Delete inherited grant
+ self.delete(inher_ud_link)
+
+ # Check the user can only get a domain token
+ self.v3_authenticate_token(domain_auth_data)
+ self.v3_authenticate_token(project_auth_data, expected_status=401)
+
+ # Delete non-inherited grant
+ self.delete(non_inher_ud_link)
+
+ # Check the user cannot get a domain token anymore
+ self.v3_authenticate_token(domain_auth_data, expected_status=401)
+
+ def test_get_token_from_inherited_group_domain_role_grants(self):
+ # Create a new group and put a new user in it to
+ # ensure that no grant is loaded from sample data
+ user = self.new_user_ref(domain_id=self.domain_id)
+ password = user['password']
+ user = self.identity_api.create_user(user)
+ user['password'] = password
+
+ group = self.new_group_ref(domain_id=self.domain['id'])
+ group = self.identity_api.create_group(group)
+ self.identity_api.add_user_to_group(user['id'], group['id'])
+
+ # Define domain and project authentication data
+ domain_auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ domain_id=self.domain_id)
+ project_auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ project_id=self.project_id)
+
+ # Check the user cannot get a domain nor a project token
+ self.v3_authenticate_token(domain_auth_data, expected_status=401)
+ self.v3_authenticate_token(project_auth_data, expected_status=401)
+
+ # Grant non-inherited role for user on domain
+ non_inher_gd_link = _build_role_assignment_link(
+ domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
+ self.put(non_inher_gd_link)
+
+ # Check the user can get only a domain token
+ self.v3_authenticate_token(domain_auth_data)
+ self.v3_authenticate_token(project_auth_data, expected_status=401)
+
+ # Create inherited role
+ inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
+ self.role_api.create_role(inherited_role['id'], inherited_role)
+
+ # Grant inherited role for user on domain
+ inher_gd_link = _build_role_assignment_link(
+ domain_id=self.domain_id, user_id=user['id'],
+ role_id=inherited_role['id'], inherited_to_projects=True)
+ self.put(inher_gd_link)
+
+ # Check the user can get both a domain and a project token
+ self.v3_authenticate_token(domain_auth_data)
+ self.v3_authenticate_token(project_auth_data)
+
+ # Delete inherited grant
+ self.delete(inher_gd_link)
+
+ # Check the user can only get a domain token
+ self.v3_authenticate_token(domain_auth_data)
+ self.v3_authenticate_token(project_auth_data, expected_status=401)
+
+ # Delete non-inherited grant
+ self.delete(non_inher_gd_link)
+
+ # Check the user cannot get a domain token anymore
+ self.v3_authenticate_token(domain_auth_data, expected_status=401)
+
+ def test_crud_user_inherited_domain_role_grants(self):
+ role_list = []
+ for _ in range(2):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ # Create a non-inherited role as a spoiler
+ self.assignment_api.create_grant(
+ role_list[1]['id'], user_id=self.user['id'],
+ domain_id=self.domain_id)
+
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[0]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
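+        # The inherited_to_projects collection lists only grants made via
+        # OS-INHERIT, so the non-inherited spoiler role granted above should
+        # not show up in it.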
+
+ self.put(member_url)
+
+ # Check we can read it back
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[0],
+ resource_url=collection_url)
+
+        # Now delete and check it's gone
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0,
+ resource_url=collection_url)
+
+ def test_list_role_assignments_for_inherited_domain_grants(self):
+        """Call ``GET /role_assignments`` with inherited domain grants.
+
+ Test Plan:
+
+ - Create 4 roles
+ - Create a domain with a user and two projects
+ - Assign two direct roles to project1
+ - Assign a spoiler role to project2
+ - Issue the URL to add inherited role to the domain
+ - Issue the URL to check it is indeed on the domain
+ - Issue the URL to check effective roles on project1 - this
+ should return 3 roles.
+
+ """
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ password = user1['password']
+ user1 = self.identity_api.create_user(user1)
+ user1['password'] = password
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ # Add some roles to the project
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[1]['id'])
+ # ..and one on a different project as a spoiler
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[2]['id'])
+
+ # Now create our inherited role on the domain
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'user_id': user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3],
+ resource_url=collection_url)
+
+ # Now use the list domain role assignments api to check if this
+ # is included
+ collection_url = (
+ '/role_assignments?user.id=%(user_id)s'
+ '&scope.domain.id=%(domain_id)s' % {
+ 'user_id': user1['id'],
+ 'domain_id': domain['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=1,
+ resource_url=collection_url)
+ ud_entity = _build_role_assignment_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
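+
+ # Shape sketch of such an inherited assignment entity in the
+ # response (IDs hypothetical):
+ # {'user': {'id': <user_id>},
+ # 'scope': {'domain': {'id': <domain_id>},
+ # 'OS-INHERIT:inherited_to': 'projects'},
+ # 'role': {'id': <role_id>},
+ # 'links': {'assignment': <OS-INHERIT member URL>}}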
+
+ # Now ask for effective list role assignments - the role should
+ # turn into a project role, along with the two direct roles that are
+ # on the project
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=3,
+ resource_url=collection_url)
+ # An effective role for an inherited role will be a project
+ # entity, with a domain link to the inherited assignment
+ ud_url = _build_role_assignment_link(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ up_entity = _build_role_assignment_entity(link=ud_url,
+ project_id=project1['id'],
+ user_id=user1['id'],
+ role_id=role_list[3]['id'],
+ inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+
+ def test_list_role_assignments_for_disabled_inheritance_extension(self):
+ """Call ``GET /role_assignments with inherited domain grants``.
+
+ Test Plan:
+
+ - Issue the URL to add inherited role to the domain
+ - Issue the URL to check effective roles on project include the
+ inherited role
+ - Disable the extension
+ - Re-check the effective roles, proving the inherited role no longer
+ shows up.
+
+ """
+
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ password = user1['password']
+ user1 = self.identity_api.create_user(user1)
+ user1['password'] = password
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ # Add some roles to the project
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[1]['id'])
+ # ...and one on a different project as a spoiler
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[2]['id'])
+
+ # Now create our inherited role on the domain
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'user_id': user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3],
+ resource_url=collection_url)
+
+ # Get effective list role assignments - the role should
+ # turn into a project role, along with the two direct roles that are
+ # on the project
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=3,
+ resource_url=collection_url)
+
+ ud_url = _build_role_assignment_link(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ up_entity = _build_role_assignment_entity(link=ud_url,
+ project_id=project1['id'],
+ user_id=user1['id'],
+ role_id=role_list[3]['id'],
+ inherited_to_projects=True)
+
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+
+ # Disable the extension and re-check the list; the role inherited
+ # from the domain should no longer show up
+ self.config_fixture.config(group='os_inherit', enabled=False)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+
+ self.assertRoleAssignmentNotInListResponse(r, up_entity)
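+
+ # Note: the config_fixture override above only lasts for this test;
+ # the fixture restores os_inherit.enabled on cleanup, so subsequent
+ # tests still see the extension enabled.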
+
+ def test_list_role_assignments_for_inherited_group_domain_grants(self):
+ """Call ``GET /role_assignments with inherited group domain grants``.
+
+ Test Plan:
+
+ - Create 4 roles
+ - Create a domain with a user and two projects
+ - Assign two direct roles to project1
+ - Assign a spoiler role to project2
+ - Issue the URL to add inherited role to the domain
+ - Issue the URL to check it is indeed on the domain
+ - Issue the URL to check effective roles on project1 - this
+ should return 3 roles.
+
+ """
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ password = user1['password']
+ user1 = self.identity_api.create_user(user1)
+ user1['password'] = password
+ user2 = self.new_user_ref(
+ domain_id=domain['id'])
+ password = user2['password']
+ user2 = self.identity_api.create_user(user2)
+ user2['password'] = password
+ group1 = self.new_group_ref(
+ domain_id=domain['id'])
+ group1 = self.identity_api.create_group(group1)
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user2['id'],
+ group1['id'])
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ # Add some roles to the project
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[1]['id'])
+ # ...and one on a different project as a spoiler
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[2]['id'])
+
+ # Now create our inherited role on the domain
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'group_id': group1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3],
+ resource_url=collection_url)
+
+ # Now use the list domain role assignments api to check if this
+ # is included
+ collection_url = (
+ '/role_assignments?group.id=%(group_id)s'
+ '&scope.domain.id=%(domain_id)s' % {
+ 'group_id': group1['id'],
+ 'domain_id': domain['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=1,
+ resource_url=collection_url)
+ gd_entity = _build_role_assignment_entity(
+ domain_id=domain['id'], group_id=group1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+
+ # Now ask for effective list role assignments - the role should
+ # turn into a user project role, along with the two direct roles
+ # that are on the project
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=3,
+ resource_url=collection_url)
+ # An effective role for an inherited role will be a project
+ # entity, with a domain link to the inherited assignment
+ up_entity = _build_role_assignment_entity(
+ link=gd_entity['links']['assignment'], project_id=project1['id'],
+ user_id=user1['id'], role_id=role_list[3]['id'],
+ inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, up_entity)
+
+ def test_filtered_role_assignments_for_inherited_grants(self):
+ """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
+
+ Test Plan:
+
+ - Create 5 roles
+ - Create a domain with a user, group and two projects
+ - Assign three direct spoiler roles to projects
+ - Issue the URL to add an inherited user role to the domain
+ - Issue the URL to add an inherited group role to the domain
+ - Issue the URL to filter by inherited roles - this should
+ return just the 2 inherited roles.
+
+ """
+ role_list = []
+ for _ in range(5):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ password = user1['password']
+ user1 = self.identity_api.create_user(user1)
+ user1['password'] = password
+ group1 = self.new_group_ref(
+ domain_id=domain['id'])
+ group1 = self.identity_api.create_group(group1)
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ # Add some spoiler roles to the projects
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[1]['id'])
+ # Create a non-inherited role as a spoiler
+ self.assignment_api.create_grant(
+ role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])
+
+ # Now create two inherited roles on the domain, one for a user
+ # and one for a group
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'user_id': user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3],
+ resource_url=collection_url)
+
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'group_id': group1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[4]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[4],
+ resource_url=collection_url)
+
+ # Now use the list role assignments api to get a list of inherited
+ # roles on the domain - should get back the two roles
+ collection_url = (
+ '/role_assignments?scope.OS-INHERIT:inherited_to=projects')
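+ # The filter keys off each assignment's scope; a rough client-side
+ # equivalent over a returned list (hypothetical name 'assignments'):
+ # inherited = [a for a in assignments
+ # if a['scope'].get('OS-INHERIT:inherited_to') == 'projects']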
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ expected_length=2,
+ resource_url=collection_url)
+ ud_entity = _build_role_assignment_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ gd_entity = _build_role_assignment_entity(
+ domain_id=domain['id'], group_id=group1['id'],
+ role_id=role_list[4]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, ud_entity)
+ self.assertRoleAssignmentInListResponse(r, gd_entity)
+
+ def _setup_hierarchical_projects_scenario(self):
+ """Creates basic hierarchical projects scenario.
+
+ This basic scenario contains a root with one leaf project and
+ two roles with the following names: non-inherited and inherited.
+
+ """
+ # Create project hierarchy
+ root = self.new_project_ref(domain_id=self.domain['id'])
+ leaf = self.new_project_ref(domain_id=self.domain['id'],
+ parent_id=root['id'])
+
+ self.resource_api.create_project(root['id'], root)
+ self.resource_api.create_project(leaf['id'], leaf)
+
+ # Create 'non-inherited' and 'inherited' roles
+ non_inherited_role = {'id': uuid.uuid4().hex, 'name': 'non-inherited'}
+ self.role_api.create_role(non_inherited_role['id'], non_inherited_role)
+ inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
+ self.role_api.create_role(inherited_role['id'], inherited_role)
+
+ return (root['id'], leaf['id'],
+ non_inherited_role['id'], inherited_role['id'])
+
+ def test_get_token_from_inherited_user_project_role_grants(self):
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Define root and leaf projects authentication data
+ root_project_auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=root_id)
+ leaf_project_auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=leaf_id)
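+
+ # build_authentication_request produces a standard v3 auth body,
+ # roughly (a sketch; values hypothetical):
+ # {'auth': {'identity': {'methods': ['password'],
+ # 'password': {'user': {'id': ..., 'password': ...}}},
+ # 'scope': {'project': {'id': ...}}}}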
+
+ # Check the user cannot get a token on either root or leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
+
+ # Grant non-inherited role for user on leaf project
+ non_inher_up_link = _build_role_assignment_link(
+ project_id=leaf_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_up_link)
+
+ # Check the user can only get a token on leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data)
+
+ # Grant inherited role for user on root project
+ inher_up_link = _build_role_assignment_link(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=inherited_role_id, inherited_to_projects=True)
+ self.put(inher_up_link)
+
+ # Check the user can still get a token only on the leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data)
+
+ # Delete non-inherited grant
+ self.delete(non_inher_up_link)
+
+ # Check the inherited role still applies to the leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data)
+
+ # Delete inherited grant
+ self.delete(inher_up_link)
+
+ # Check the user cannot get a token on leaf project anymore
+ self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
+
+ def test_get_token_from_inherited_group_project_role_grants(self):
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Create group and add user to it
+ group = self.new_group_ref(domain_id=self.domain['id'])
+ group = self.identity_api.create_group(group)
+ self.identity_api.add_user_to_group(self.user['id'], group['id'])
+
+ # Define root and leaf projects authentication data
+ root_project_auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=root_id)
+ leaf_project_auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=leaf_id)
+
+ # Check the user cannot get a token on either root or leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
+
+ # Grant non-inherited role for group on leaf project
+ non_inher_gp_link = _build_role_assignment_link(
+ project_id=leaf_id, group_id=group['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_gp_link)
+
+ # Check the user can only get a token on leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data)
+
+ # Grant inherited role for group on root project
+ inher_gp_link = _build_role_assignment_link(
+ project_id=root_id, group_id=group['id'],
+ role_id=inherited_role_id, inherited_to_projects=True)
+ self.put(inher_gp_link)
+
+ # Check the user can still get a token only on the leaf project
+ self.v3_authenticate_token(root_project_auth_data, expected_status=401)
+ self.v3_authenticate_token(leaf_project_auth_data)
+
+ # Delete the non-inherited grant
+ self.delete(non_inher_gp_link)
+
+ # Check the inherited role still applies to the leaf project
+ self.v3_authenticate_token(leaf_project_auth_data)
+
+ # Delete inherited grant
+ self.delete(inher_gp_link)
+
+ # Check the user cannot get a token on leaf project anymore
+ self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
+
+ def test_get_role_assignments_for_project_hierarchy(self):
+ """Call ``GET /role_assignments``.
+
+ Test Plan:
+
+ - Create 2 roles
+ - Create a hierarchy of projects with one root and one leaf project
+ - Issue the URL to add a non-inherited user role to the root project
+ - Issue the URL to add an inherited user role to the root project
+ - Issue the URL to get all role assignments - this should return just
+ 2 roles (non-inherited and inherited) in the root project.
+
+ """
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Grant non-inherited role
+ non_inher_up_entity = _build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_up_entity['links']['assignment'])
+
+ # Grant inherited role
+ inher_up_entity = _build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=inherited_role_id, inherited_to_projects=True)
+ self.put(inher_up_entity['links']['assignment'])
+
+ # Get role assignments
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ resource_url=collection_url)
+
+ # Assert that the user has non-inherited role on root project
+ self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
+
+ # Assert that the user has inherited role on root project
+ self.assertRoleAssignmentInListResponse(r, inher_up_entity)
+
+ # Assert that the user does not have non-inherited role on leaf project
+ non_inher_up_entity = _build_role_assignment_entity(
+ project_id=leaf_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
+
+ # Assert that the user does not have inherited role on leaf project
+ inher_up_entity['scope']['project']['id'] = leaf_id
+ self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
+
+ def test_get_effective_role_assignments_for_project_hierarchy(self):
+ """Call ``GET /role_assignments?effective``.
+
+ Test Plan:
+
+ - Create 2 roles
+ - Create a hierarchy of projects with one root and one leaf project
+ - Issue the URL to add a non-inherited user role to the root project
+ - Issue the URL to add an inherited user role to the root project
+ - Issue the URL to get effective role assignments - this should return
+ 1 role (non-inherited) on the root project and 1 role (inherited) on
+ the leaf project.
+
+ """
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Grant non-inherited role
+ non_inher_up_entity = _build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_up_entity['links']['assignment'])
+
+ # Grant inherited role
+ inher_up_entity = _build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=inherited_role_id, inherited_to_projects=True)
+ self.put(inher_up_entity['links']['assignment'])
+
+ # Get effective role assignments
+ collection_url = '/role_assignments?effective'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ resource_url=collection_url)
+
+ # Assert that the user has non-inherited role on root project
+ self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
+
+ # Assert that the user does not have inherited role on root project
+ self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
+
+ # Assert that the user does not have non-inherited role on leaf project
+ non_inher_up_entity = _build_role_assignment_entity(
+ project_id=leaf_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
+
+ # Assert that the user has inherited role on leaf project
+ inher_up_entity['scope']['project']['id'] = leaf_id
+ self.assertRoleAssignmentInListResponse(r, inher_up_entity)
+
+ def test_get_inherited_role_assignments_for_project_hierarchy(self):
+ """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
+
+ Test Plan:
+
+ - Create 2 roles
+ - Create a hierarchy of projects with one root and one leaf project
+ - Issue the URL to add a non-inherited user role to the root project
+ - Issue the URL to add an inherited user role to the root project
+ - Issue the URL to filter inherited to projects role assignments - this
+ should return 1 role (inherited) on the root project.
+
+ """
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Grant non-inherited role
+ non_inher_up_entity = _build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_up_entity['links']['assignment'])
+
+ # Grant inherited role
+ inher_up_entity = _build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=inherited_role_id, inherited_to_projects=True)
+ self.put(inher_up_entity['links']['assignment'])
+
+ # Get inherited role assignments
+ collection_url = ('/role_assignments'
+ '?scope.OS-INHERIT:inherited_to=projects')
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r,
+ resource_url=collection_url)
+
+ # Assert that the user does not have non-inherited role on root project
+ self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
+
+ # Assert that the user has inherited role on root project
+ self.assertRoleAssignmentInListResponse(r, inher_up_entity)
+
+ # Assert that the user does not have non-inherited role on leaf project
+ non_inher_up_entity = _build_role_assignment_entity(
+ project_id=leaf_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
+
+ # Assert that the user does not have inherited role on leaf project
+ inher_up_entity['scope']['project']['id'] = leaf_id
+ self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
+
+
+class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase):
+ """Test inheritance crud and its effects."""
+
+ def config_overrides(self):
+ super(AssignmentInheritanceDisabledTestCase, self).config_overrides()
+ self.config_fixture.config(group='os_inherit', enabled=False)
+
+ def test_crud_inherited_role_grants_failed_if_disabled(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role['id'], role)
+
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url, expected_status=404)
+ self.head(member_url, expected_status=404)
+ self.get(collection_url, expected_status=404)
+ self.delete(member_url, expected_status=404)
+
+
+class AssignmentV3toV2MethodsTestCase(tests.TestCase):
+ """Test domain V3 to V2 conversion methods."""
+
+ def test_v2controller_filter_domain_id(self):
+ # V2.0 is not domain aware, ensure domain_id is popped off the ref.
+ other_data = uuid.uuid4().hex
+ domain_id = uuid.uuid4().hex
+ ref = {'domain_id': domain_id,
+ 'other_data': other_data}
+
+ ref_no_domain = {'other_data': other_data}
+ expected_ref = ref_no_domain.copy()
+
+ updated_ref = controller.V2Controller.filter_domain_id(ref)
+ self.assertIs(ref, updated_ref)
+ self.assertDictEqual(ref, expected_ref)
+ # Make sure we don't error/muck up data if domain_id isn't present
+ updated_ref = controller.V2Controller.filter_domain_id(ref_no_domain)
+ self.assertIs(ref_no_domain, updated_ref)
+ self.assertDictEqual(ref_no_domain, expected_ref)
+
+ def test_v3controller_filter_domain_id(self):
+ # No data should be filtered out in this case.
+ other_data = uuid.uuid4().hex
+ domain_id = uuid.uuid4().hex
+ ref = {'domain_id': domain_id,
+ 'other_data': other_data}
+
+ expected_ref = ref.copy()
+ updated_ref = controller.V3Controller.filter_domain_id(ref)
+ self.assertIs(ref, updated_ref)
+ self.assertDictEqual(ref, expected_ref)
+
+ def test_v2controller_filter_domain(self):
+ other_data = uuid.uuid4().hex
+ domain_id = uuid.uuid4().hex
+ non_default_domain_ref = {'domain': {'id': domain_id},
+ 'other_data': other_data}
+ default_domain_ref = {'domain': {'id': 'default'},
+ 'other_data': other_data}
+ updated_ref = controller.V2Controller.filter_domain(default_domain_ref)
+ self.assertNotIn('domain', updated_ref)
+ self.assertRaises(exception.Unauthorized,
+ controller.V2Controller.filter_domain,
+ non_default_domain_ref)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_auth.py b/keystone-moon/keystone/tests/unit/test_v3_auth.py
new file mode 100644
index 00000000..ec079170
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_auth.py
@@ -0,0 +1,4494 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import datetime
+import json
+import operator
+import uuid
+
+from keystoneclient.common import cms
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+import six
+from testtools import matchers
+from testtools import testcase
+
+from keystone import auth
+from keystone import exception
+from keystone.policy.backends import rules
+from keystone.tests import unit as tests
+from keystone.tests.unit import ksfixtures
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+class TestAuthInfo(test_v3.AuthTestMixin, testcase.TestCase):
+ def setUp(self):
+ super(TestAuthInfo, self).setUp()
+ auth.controllers.load_auth_methods()
+
+ def test_missing_auth_methods(self):
+ auth_data = {'identity': {}}
+ auth_data['identity']['token'] = {'id': uuid.uuid4().hex}
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo.create,
+ None,
+ auth_data)
+
+ def test_unsupported_auth_method(self):
+ auth_data = {'methods': ['abc']}
+ auth_data['abc'] = {'test': 'test'}
+ auth_data = {'identity': auth_data}
+ self.assertRaises(exception.AuthMethodNotSupported,
+ auth.controllers.AuthInfo.create,
+ None,
+ auth_data)
+
+ def test_missing_auth_method_data(self):
+ auth_data = {'methods': ['password']}
+ auth_data = {'identity': auth_data}
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo.create,
+ None,
+ auth_data)
+
+ def test_project_name_no_domain(self):
+ auth_data = self.build_authentication_request(
+ username='test',
+ password='test',
+ project_name='abc')['auth']
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo.create,
+ None,
+ auth_data)
+
+ def test_both_project_and_domain_in_scope(self):
+ auth_data = self.build_authentication_request(
+ user_id='test',
+ password='test',
+ project_name='test',
+ domain_name='test')['auth']
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo.create,
+ None,
+ auth_data)
+
+ def test_get_method_names_duplicates(self):
+ auth_data = self.build_authentication_request(
+ token='test',
+ user_id='test',
+ password='test')['auth']
+ auth_data['identity']['methods'] = ['password', 'token',
+ 'password', 'password']
+ context = None
+ auth_info = auth.controllers.AuthInfo.create(context, auth_data)
+ self.assertEqual(auth_info.get_method_names(),
+ ['password', 'token'])
+
+ def test_get_method_data_invalid_method(self):
+ auth_data = self.build_authentication_request(
+ user_id='test',
+ password='test')['auth']
+ context = None
+ auth_info = auth.controllers.AuthInfo.create(context, auth_data)
+
+ method_name = uuid.uuid4().hex
+ self.assertRaises(exception.ValidationError,
+ auth_info.get_method_data,
+ method_name)
+
+
+class TokenAPITests(object):
+ # Why is this not just setUp? Because TokenAPITests is not a test class
+ # itself. If TokenAPITests became a subclass of the testcase, it would get
+ # called by the enumerate-tests-in-file code. The way the functions get
+ # resolved in Python for multiple inheritance means that a setUp in this
+ # class would get skipped by the testrunner.
+ def doSetUp(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain_id,
+ password=self.user['password'])
+ resp = self.v3_authenticate_token(auth_data)
+ self.token_data = resp.result
+ self.token = resp.headers.get('X-Subject-Token')
+ self.headers = {'X-Subject-Token': resp.headers.get('X-Subject-Token')}
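+
+ # Note: v3 returns the token ID in the X-Subject-Token response
+ # header rather than in the JSON body; self.headers is reused by
+ # the tests below to validate the token via GET/HEAD /auth/tokens.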
+
+ def test_default_fixture_scope_token(self):
+ self.assertIsNotNone(self.get_scoped_token())
+
+ def verify_token(self, *args, **kwargs):
+ return cms.verify_token(*args, **kwargs)
+
+ def test_v3_token_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token_id = resp.headers.get('X-Subject-Token')
+ self.assertIn('expires_at', token_data['token'])
+
+ decoded_token = self.verify_token(token_id, CONF.signing.certfile,
+ CONF.signing.ca_certs)
+ decoded_token_dict = json.loads(decoded_token)
+
+ token_resp_dict = json.loads(resp.body)
+
+ self.assertEqual(decoded_token_dict, token_resp_dict)
+ # should be able to validate hash PKI token as well
+ hash_token_id = cms.cms_hash_token(token_id)
+ headers = {'X-Subject-Token': hash_token_id}
+ resp = self.get('/auth/tokens', headers=headers)
+ expected_token_data = resp.result
+ self.assertDictEqual(expected_token_data, token_data)
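+
+ # The hashed form produced by cms.cms_hash_token is the short digest
+ # that can stand in for the full ASN.1 PKI token during validation,
+ # as the GET above demonstrates.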
+
+ def test_v3_v2_intermix_non_default_domain_failed(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ token = self.get_requested_token(auth_data)
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(path=path,
+ token='ADMIN',
+ method='GET',
+ expected_status=401)
+
+ def test_v3_v2_intermix_new_default_domain(self):
+ # If the default_domain_id config option is changed, then should be
+ # able to validate a v3 token with user in the new domain.
+
+ # 1) Create a new domain for the user.
+ new_domain_id = uuid.uuid4().hex
+ new_domain = {
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'id': new_domain_id,
+ 'name': uuid.uuid4().hex,
+ }
+
+ self.resource_api.create_domain(new_domain_id, new_domain)
+
+ # 2) Create user in new domain.
+ new_user_password = uuid.uuid4().hex
+ new_user = {
+ 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain_id,
+ 'password': new_user_password,
+ 'email': uuid.uuid4().hex,
+ }
+
+ new_user = self.identity_api.create_user(new_user)
+
+ # 3) Update the default_domain_id config option to the new domain
+
+ self.config_fixture.config(group='identity',
+ default_domain_id=new_domain_id)
+
+ # 4) Get a token using v3 api.
+
+ auth_data = self.build_authentication_request(
+ user_id=new_user['id'],
+ password=new_user_password)
+ token = self.get_requested_token(auth_data)
+
+ # 5) Authenticate token using v2 api.
+
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+
+ def test_v3_v2_intermix_domain_scoped_token_failed(self):
+ # grant the domain role to user
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ token = self.get_requested_token(auth_data)
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(path=path,
+ token='ADMIN',
+ method='GET',
+ expected_status=401)
+
+ def test_v3_v2_intermix_non_default_project_failed(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.project['id'])
+ token = self.get_requested_token(auth_data)
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(path=path,
+ token='ADMIN',
+ method='GET',
+ expected_status=401)
+
+ def test_v3_v2_unscoped_token_intermix(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+ # v2 token time has no sub-second precision, so just make
+ # sure the non-fractional part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
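+
+ # Worked example of the comparison above (hypothetical timestamps):
+ # v2 'expires': '2015-06-30T18:47:29Z'[:-1] -> '2015-06-30T18:47:29',
+ # a substring of v3 'expires_at': '2015-06-30T18:47:29.000000Z'.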
+
+ def test_v3_v2_token_intermix(self):
+ # FIXME(gyee): PKI tokens are not interchangeable because token
+ # data is baked into the token itself.
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+ # v2 token time has no sub-second precision, so just make
+ # sure the non-fractional part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+ self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
+ token_data['token']['roles'][0]['id'])
+
+ def test_v3_v2_hashed_pki_token_intermix(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # should be able to validate a hash PKI token in v2 too
+ token = cms.cms_hash_token(token)
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+ # v2 token time has no sub-second precision, so just make
+ # sure the non-fractional part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+ self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
+ token_data['token']['roles'][0]['id'])
+
+ def test_v2_v3_unscoped_token_intermix(self):
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user['id'],
+ 'password': self.user['password']
+ }
+ }}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+ v2_token_data = resp.result
+ v2_token = v2_token_data['access']['token']['id']
+ headers = {'X-Subject-Token': v2_token}
+ resp = self.get('/auth/tokens', headers=headers)
+ token_data = resp.result
+ self.assertEqual(v2_token_data['access']['user']['id'],
+ token_data['token']['user']['id'])
+ # v2 token time has no sub-second precision, so just make
+ # sure the non-fractional part agrees
+ self.assertIn(v2_token_data['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+
+ def test_v2_v3_token_intermix(self):
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user['id'],
+ 'password': self.user['password']
+ },
+ 'tenantId': self.project['id']
+ }}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+ v2_token_data = resp.result
+ v2_token = v2_token_data['access']['token']['id']
+ headers = {'X-Subject-Token': v2_token}
+ resp = self.get('/auth/tokens', headers=headers)
+ token_data = resp.result
+ self.assertEqual(v2_token_data['access']['user']['id'],
+ token_data['token']['user']['id'])
+ # v2 token time has no sub-second precision, so just make
+ # sure the non-fractional part agrees
+ self.assertIn(v2_token_data['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+ self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'],
+ token_data['token']['roles'][0]['name'])
+
+ v2_issued_at = timeutils.parse_isotime(
+ v2_token_data['access']['token']['issued_at'])
+ v3_issued_at = timeutils.parse_isotime(
+ token_data['token']['issued_at'])
+
+ self.assertEqual(v2_issued_at, v3_issued_at)
+
+ def test_rescoping_token(self):
+ expires = self.token_data['token']['expires_at']
+ auth_data = self.build_authentication_request(
+ token=self.token,
+ project_id=self.project_id)
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+ # make sure expires stayed the same
+ self.assertEqual(expires, r.result['token']['expires_at'])
+
+ def test_check_token(self):
+ self.head('/auth/tokens', headers=self.headers, expected_status=200)
+
+ def test_validate_token(self):
+ r = self.get('/auth/tokens', headers=self.headers)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_validate_token_nocatalog(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ headers = {'X-Subject-Token': self.get_requested_token(auth_data)}
+ r = self.get('/auth/tokens?nocatalog', headers=headers)
+ self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
+
+
+class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
+ def config_overrides(self):
+ super(AllowRescopeScopedTokenDisabledTests, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ allow_rescope_scoped_token=False)
+
+ def test_rescoping_v3_to_v3_disabled(self):
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ token=self.get_scoped_token(),
+ project_id=self.project_id),
+ expected_status=403)
+
+ def _v2_token(self):
+ body = {
+ 'auth': {
+ "tenantId": self.project['id'],
+ 'passwordCredentials': {
+ 'userId': self.user['id'],
+ 'password': self.user['password']
+ }
+ }}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+ v2_token_data = resp.result
+ return v2_token_data
+
+ def _v2_token_from_token(self, token):
+ body = {
+ 'auth': {
+ "tenantId": self.project['id'],
+ "token": token
+ }}
+ self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body,
+ expected_status=403)
+
+ def test_rescoping_v2_to_v3_disabled(self):
+ token = self._v2_token()
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ token=token['access']['token']['id'],
+ project_id=self.project_id),
+ expected_status=403)
+
+ def test_rescoping_v3_to_v2_disabled(self):
+ token = {'id': self.get_scoped_token()}
+ self._v2_token_from_token(token)
+
+ def test_rescoping_v2_to_v2_disabled(self):
+ token = self._v2_token()
+ self._v2_token_from_token(token['access']['token'])
+
+ def test_rescoped_domain_token_disabled(self):
+
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domainA['id'])
+ unscoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password']))
+ # Get a domain-scoped token from the unscoped token
+ domain_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ token=unscoped_token,
+ domain_id=self.domainA['id']))
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ token=domain_scoped_token,
+ project_id=self.project_id),
+ expected_status=403)
+
+
+class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+ def config_overrides(self):
+ super(TestPKITokenAPIs, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider')
+
+ def setUp(self):
+ super(TestPKITokenAPIs, self).setUp()
+ self.doSetUp()
+
+
+class TestPKIZTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+
+ def verify_token(self, *args, **kwargs):
+ return cms.pkiz_verify(*args, **kwargs)
+
+ def config_overrides(self):
+ super(TestPKIZTokenAPIs, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pkiz.Provider')
+
+ def setUp(self):
+ super(TestPKIZTokenAPIs, self).setUp()
+ self.doSetUp()
+
+
+class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+ def config_overrides(self):
+ super(TestUUIDTokenAPIs, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.uuid.Provider')
+
+ def setUp(self):
+ super(TestUUIDTokenAPIs, self).setUp()
+ self.doSetUp()
+
+ def test_v3_token_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token_id = resp.headers.get('X-Subject-Token')
+ self.assertIn('expires_at', token_data['token'])
+ self.assertFalse(cms.is_asn1_token(token_id))
+
+ def test_v3_v2_hashed_pki_token_intermix(self):
+ # This test is only applicable to PKI tokens,
+ # so skip it for UUID tokens.
+ pass
+
+
+class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
+ """Test token revoke using v3 Identity API by token owner and admin."""
+
+ def load_sample_data(self):
+ """Load Sample Data for Test Cases.
+
+ Two domains, domainA and domainB
+ Two users in domainA, userNormalA and userAdminA
+ One user in domainB, userAdminB
+
+ """
+ super(TestTokenRevokeSelfAndAdmin, self).load_sample_data()
+ # DomainA setup
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+
+ self.userAdminA = self.new_user_ref(domain_id=self.domainA['id'])
+ password = self.userAdminA['password']
+ self.userAdminA = self.identity_api.create_user(self.userAdminA)
+ self.userAdminA['password'] = password
+
+ self.userNormalA = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ password = self.userNormalA['password']
+ self.userNormalA = self.identity_api.create_user(self.userNormalA)
+ self.userNormalA['password'] = password
+
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.userAdminA['id'],
+ domain_id=self.domainA['id'])
+
+ def config_overrides(self):
+ super(TestTokenRevokeSelfAndAdmin, self).config_overrides()
+ self.config_fixture.config(
+ group='oslo_policy',
+ policy_file=tests.dirs.etc('policy.v3cloudsample.json'))
+
+ def test_user_revokes_own_token(self):
+ user_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.userNormalA['id'],
+ password=self.userNormalA['password'],
+ user_domain_id=self.domainA['id']))
+ self.assertNotEmpty(user_token)
+ headers = {'X-Subject-Token': user_token}
+
+ adminA_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.userAdminA['id'],
+ password=self.userAdminA['password'],
+ domain_name=self.domainA['name']))
+
+ self.head('/auth/tokens', headers=headers, expected_status=200,
+ token=adminA_token)
+ self.head('/auth/tokens', headers=headers, expected_status=200,
+ token=user_token)
+ self.delete('/auth/tokens', headers=headers, expected_status=204,
+ token=user_token)
+ # invalid X-Auth-Token and invalid X-Subject-Token (401)
+ self.head('/auth/tokens', headers=headers, expected_status=401,
+ token=user_token)
+ # invalid X-Auth-Token and invalid X-Subject-Token (401)
+ self.delete('/auth/tokens', headers=headers, expected_status=401,
+ token=user_token)
+ # valid X-Auth-Token and invalid X-Subject-Token (404)
+ self.delete('/auth/tokens', headers=headers, expected_status=404,
+ token=adminA_token)
+ # valid X-Auth-Token and invalid X-Subject-Token (404)
+ self.head('/auth/tokens', headers=headers, expected_status=404,
+ token=adminA_token)
+
+ def test_adminA_revokes_userA_token(self):
+ user_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.userNormalA['id'],
+ password=self.userNormalA['password'],
+ user_domain_id=self.domainA['id']))
+ self.assertNotEmpty(user_token)
+ headers = {'X-Subject-Token': user_token}
+
+ adminA_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.userAdminA['id'],
+ password=self.userAdminA['password'],
+ domain_name=self.domainA['name']))
+
+ self.head('/auth/tokens', headers=headers, expected_status=200,
+ token=adminA_token)
+ self.head('/auth/tokens', headers=headers, expected_status=200,
+ token=user_token)
+ self.delete('/auth/tokens', headers=headers, expected_status=204,
+ token=adminA_token)
+ # invalid X-Auth-Token and invalid X-Subject-Token (401)
+ self.head('/auth/tokens', headers=headers, expected_status=401,
+ token=user_token)
+ # valid X-Auth-Token and invalid X-Subject-Token (404)
+ self.delete('/auth/tokens', headers=headers, expected_status=404,
+ token=adminA_token)
+ # valid X-Auth-Token and invalid X-Subject-Token (404)
+ self.head('/auth/tokens', headers=headers, expected_status=404,
+ token=adminA_token)
+
+ def test_adminB_fails_revoking_userA_token(self):
+ # DomainB setup
+ self.domainB = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'], self.domainB)
+ self.userAdminB = self.new_user_ref(domain_id=self.domainB['id'])
+ password = self.userAdminB['password']
+ self.userAdminB = self.identity_api.create_user(self.userAdminB)
+ self.userAdminB['password'] = password
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.userAdminB['id'],
+ domain_id=self.domainB['id'])
+
+ user_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.userNormalA['id'],
+ password=self.userNormalA['password'],
+ user_domain_id=self.domainA['id']))
+ headers = {'X-Subject-Token': user_token}
+
+ adminB_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.userAdminB['id'],
+ password=self.userAdminB['password'],
+ domain_name=self.domainB['name']))
+
+ self.head('/auth/tokens', headers=headers, expected_status=403,
+ token=adminB_token)
+ self.delete('/auth/tokens', headers=headers, expected_status=403,
+ token=adminB_token)
+
+
+class TestTokenRevokeById(test_v3.RestfulTestCase):
+ """Test token revocation on the v3 Identity API."""
+
+ def config_overrides(self):
+ super(TestTokenRevokeById, self).config_overrides()
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider',
+ revoke_by_id=False)
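+
+ # With revoke_by_id disabled, revocation is checked against the
+ # revoke extension's event list (the kvs driver configured above)
+ # rather than by tracking individual token IDs.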
+
+ def setUp(self):
+ """Setup for Token Revoking Test Cases.
+
+ As well as the usual housekeeping, create a set of domains,
+ users, groups, roles and projects for the subsequent tests:
+
+ - Two domains: A & B
+ - Three users (1, 2 and 3)
+ - Three groups (1, 2 and 3)
+ - Two roles (1 and 2)
+ - DomainA owns user1, domainB owns user2 and user3
+ - DomainA owns group1 and group2, domainB owns group3
+ - User1 and user2 are members of group1
+ - User3 is a member of group2
+ - Two projects: A & B, both in domainA
+ - Group1 has role1 on Project A and B, meaning that user1 and user2
+ will get these roles by virtue of membership
+ - User1, 2 and 3 have role1 assigned to projectA
+ - Group1 has role1 on Project A and B, meaning that user1 and user2
+ will get role1 (duplicated) by virtue of membership
+ - User1 has role2 assigned to domainA
+
+ """
+ super(TestTokenRevokeById, self).setUp()
+
+ # Start by creating a couple of domains and projects
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'], self.domainB)
+ self.projectA = self.new_project_ref(domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.projectA['id'], self.projectA)
+ self.projectB = self.new_project_ref(domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.projectB['id'], self.projectB)
+
+ # Now create some users
+ self.user1 = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ password = self.user1['password']
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+
+ self.user2 = self.new_user_ref(
+ domain_id=self.domainB['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+
+ self.user3 = self.new_user_ref(
+ domain_id=self.domainB['id'])
+ password = self.user3['password']
+ self.user3 = self.identity_api.create_user(self.user3)
+ self.user3['password'] = password
+
+ self.group1 = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.group1 = self.identity_api.create_group(self.group1)
+
+ self.group2 = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.group2 = self.identity_api.create_group(self.group2)
+
+ self.group3 = self.new_group_ref(
+ domain_id=self.domainB['id'])
+ self.group3 = self.identity_api.create_group(self.group3)
+
+ self.identity_api.add_user_to_group(self.user1['id'],
+ self.group1['id'])
+ self.identity_api.add_user_to_group(self.user2['id'],
+ self.group1['id'])
+ self.identity_api.add_user_to_group(self.user3['id'],
+ self.group2['id'])
+
+ self.role1 = self.new_role_ref()
+ self.role_api.create_role(self.role1['id'], self.role1)
+ self.role2 = self.new_role_ref()
+ self.role_api.create_role(self.role2['id'], self.role2)
+
+ self.assignment_api.create_grant(self.role2['id'],
+ user_id=self.user1['id'],
+ domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ user_id=self.user1['id'],
+ project_id=self.projectA['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ user_id=self.user2['id'],
+ project_id=self.projectA['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ user_id=self.user3['id'],
+ project_id=self.projectA['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ group_id=self.group1['id'],
+ project_id=self.projectA['id'])
+
+ def test_unscoped_token_remains_valid_after_role_assignment(self):
+ unscoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password']))
+
+ scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ token=unscoped_token,
+ project_id=self.projectA['id']))
+
+ # confirm both tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': scoped_token},
+ expected_status=200)
+
+ # create a new role
+ role = self.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
+ # assign a new role
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': role['id']})
+
+ # both tokens should remain valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': scoped_token},
+ expected_status=200)
+
+ def test_deleting_user_grant_revokes_token(self):
+ """Test deleting a user grant revokes token.
+
+ Test Plan:
+
+ - Get a token for user1, scoped to ProjectA
+ - Delete the grant user1 has on ProjectA
+ - Check token is no longer valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ token = self.get_requested_token(auth_data)
+ # Confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+ # Delete the grant, which should invalidate the token
+ grant_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/'
+ 'roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': self.role1['id']})
+ self.delete(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=404)
+
+ def role_data_fixtures(self):
+ self.projectC = self.new_project_ref(domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.projectC['id'], self.projectC)
+ self.user4 = self.new_user_ref(domain_id=self.domainB['id'])
+ password = self.user4['password']
+ self.user4 = self.identity_api.create_user(self.user4)
+ self.user4['password'] = password
+ self.user5 = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ password = self.user5['password']
+ self.user5 = self.identity_api.create_user(self.user5)
+ self.user5['password'] = password
+ self.user6 = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ password = self.user6['password']
+ self.user6 = self.identity_api.create_user(self.user6)
+ self.user6['password'] = password
+ self.identity_api.add_user_to_group(self.user5['id'],
+ self.group1['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ group_id=self.group1['id'],
+ project_id=self.projectB['id'])
+ self.assignment_api.create_grant(self.role2['id'],
+ user_id=self.user4['id'],
+ project_id=self.projectC['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ user_id=self.user6['id'],
+ project_id=self.projectA['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ user_id=self.user6['id'],
+ domain_id=self.domainA['id'])
+
+ def test_deleting_role_revokes_token(self):
+ """Test deleting a role revokes token.
+
+ Add some additional test data, namely:
+ - A third project (project C)
+ - Three additional users - user4 owned by domainB and user5 and 6
+ owned by domainA (different domain ownership should not affect
+ the test results, just provided to broaden test coverage)
+ - User5 is a member of group1
+ - Group1 gets an additional assignment - role1 on projectB as
+ well as its existing role1 on projectA
+ - User4 has role2 on Project C
+ - User6 has role1 on projectA and domainA
+ - This allows us to create 5 tokens by virtue of different types
+ of role assignment:
+ - user1, scoped to ProjectA by virtue of user role1 assignment
+ - user5, scoped to ProjectB by virtue of group role1 assignment
+ - user4, scoped to ProjectC by virtue of user role2 assignment
+ - user6, scoped to ProjectA by virtue of user role1 assignment
+ - user6, scoped to DomainA by virtue of user role1 assignment
+ - role1 is then deleted
+ - Check the tokens on Project A and B, and DomainA are revoked,
+ but not the one for Project C
+
+ """
+
+ self.role_data_fixtures()
+
+ # Now we are ready to start issuing requests
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ tokenA = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user5['id'],
+ password=self.user5['password'],
+ project_id=self.projectB['id'])
+ tokenB = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user4['id'],
+ password=self.user4['password'],
+ project_id=self.projectC['id'])
+ tokenC = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user6['id'],
+ password=self.user6['password'],
+ project_id=self.projectA['id'])
+ tokenD = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user6['id'],
+ password=self.user6['password'],
+ domain_id=self.domainA['id'])
+ tokenE = self.get_requested_token(auth_data)
+ # Confirm tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenA},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenB},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenC},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenD},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenE},
+ expected_status=200)
+
+ # Delete the role, which should invalidate the tokens
+ role_url = '/roles/%s' % self.role1['id']
+ self.delete(role_url)
+
+ # Check that the tokens that used role1 are invalid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenA},
+ expected_status=404)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenB},
+ expected_status=404)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenD},
+ expected_status=404)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenE},
+ expected_status=404)
+
+ # ...but the one using role2 is still valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': tokenC},
+ expected_status=200)
+
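+ # NOTE(editor): the repeated HEAD /auth/tokens checks above reduce to a
+ # one-line assertion; a minimal sketch assuming only the self.head()
+ # helper already used in this file (the method name is hypothetical):
+ def _assert_token_status(self, token, expected_status):
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=expected_status)
+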
+ def test_domain_user_role_assignment_maintains_token(self):
+ """Test user-domain role assignment maintains existing token.
+
+ Test Plan:
+
+ - Get a token for user1, scoped to ProjectA
+ - Create a grant for user1 on DomainB
+ - Check token is still valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ token = self.get_requested_token(auth_data)
+ # Confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+ # Assign a role, which should not affect the token
+ grant_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/'
+ 'roles/%(role_id)s' % {
+ 'domain_id': self.domainB['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': self.role1['id']})
+ self.put(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+
+ def test_disabling_project_revokes_token(self):
+ token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']))
+
+ # confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+
+ # disable the project, which should invalidate the token
+ self.patch(
+ '/projects/%(project_id)s' % {'project_id': self.projectA['id']},
+ body={'project': {'enabled': False}})
+
+ # user should no longer have access to the project
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=404)
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']),
+ expected_status=401)
+
+ def test_deleting_project_revokes_token(self):
+ token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']))
+
+ # confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+
+ # delete the project, which should invalidate the token
+ self.delete(
+ '/projects/%(project_id)s' % {'project_id': self.projectA['id']})
+
+ # user should no longer have access to the project
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=404)
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']),
+ expected_status=401)
+
+ def test_deleting_group_grant_revokes_tokens(self):
+ """Test deleting a group grant revokes tokens.
+
+ Test Plan:
+
+ - Get a token for user1, scoped to ProjectA
+ - Get a token for user2, scoped to ProjectA
+ - Get a token for user3, scoped to ProjectA
+ - Delete the grant group1 has on ProjectA
+ - Check tokens for user1 & user2 are no longer valid,
+ since user1 and user2 are members of group1
+ - Check token for user3 is still valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ token1 = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.projectA['id'])
+ token2 = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id'])
+ token3 = self.get_requested_token(auth_data)
+ # Confirm tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token3},
+ expected_status=200)
+ # Delete the group grant, which should invalidate the
+ # tokens for user1 and user2
+ grant_url = (
+ '/projects/%(project_id)s/groups/%(group_id)s/'
+ 'roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+ self.delete(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=404)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=404)
+ # But user3's token should still be valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token3},
+ expected_status=200)
+
+ def test_domain_group_role_assignment_maintains_token(self):
+ """Test domain-group role assignment maintains existing token.
+
+ Test Plan:
+
+ - Get a token for user1, scoped to ProjectA
+ - Create a grant for group1 on DomainB
+ - Check token is still valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ token = self.get_requested_token(auth_data)
+ # Confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+ # Create the grant, which should not affect the token
+ grant_url = (
+ '/domains/%(domain_id)s/groups/%(group_id)s/'
+ 'roles/%(role_id)s' % {
+ 'domain_id': self.domainB['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+ self.put(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+
+ def test_group_membership_changes_revokes_token(self):
+ """Test add/removal to/from group revokes token.
+
+ Test Plan:
+
+ - Get a token for user1, scoped to ProjectA
+ - Get a token for user2, scoped to ProjectA
+ - Remove user1 from group1
+ - Check token for user1 is no longer valid
+ - Check token for user2 is still valid, even though
+ user2 is also part of group1
+ - Add user2 to group2
+ - Check token for user2 is still valid, since adding a user
+ to a group does not revoke existing tokens
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ token1 = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.projectA['id'])
+ token2 = self.get_requested_token(auth_data)
+ # Confirm tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=200)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=200)
+ # Remove user1 from group1, which should invalidate
+ # the token
+ self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group1['id'],
+ 'user_id': self.user1['id']})
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=404)
+ # But user2's token should still be valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=200)
+ # Adding user2 to a group should not invalidate token
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group2['id'],
+ 'user_id': self.user2['id']})
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=200)
+
+ def test_removing_role_assignment_does_not_affect_other_users(self):
+ """Revoking a role from one user should not affect other users."""
+ user1_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id']))
+
+ user3_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']))
+
+ # delete relationships between user1 and projectA from setUp
+ self.delete(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': self.role1['id']})
+ self.delete(
+ '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
+ {'project_id': self.projectA['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+
+ # authorization for the first user should now fail
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': user1_token},
+ expected_status=404)
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id']),
+ expected_status=401)
+
+ # authorization for the second user should still succeed
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': user3_token},
+ expected_status=200)
+ self.v3_authenticate_token(
+ self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']))
+
+ def test_deleting_project_deletes_grants(self):
+ # Build the role path in two steps to keep lines within PEP8 limits
+ role_path = ('/projects/%(project_id)s/users/%(user_id)s/'
+ 'roles/%(role_id)s')
+ role_path = role_path % {'user_id': self.user['id'],
+ 'project_id': self.projectA['id'],
+ 'role_id': self.role['id']}
+
+ # grant the user a role on the project
+ self.put(role_path)
+
+ # delete the project, which should remove the roles
+ self.delete(
+ '/projects/%(project_id)s' % {'project_id': self.projectA['id']})
+
+ # Make sure we get a NotFound (404) when issuing a HEAD on that role.
+ self.head(role_path, expected_status=404)
+
+ def get_v2_token(self, token=None, project_id=None):
+ body = {'auth': {}}
+
+ if token:
+ body['auth']['token'] = {
+ 'id': token
+ }
+ else:
+ body['auth']['passwordCredentials'] = {
+ 'username': self.default_domain_user['name'],
+ 'password': self.default_domain_user['password'],
+ }
+
+ if project_id:
+ body['auth']['tenantId'] = project_id
+
+ r = self.admin_request(method='POST', path='/v2.0/tokens', body=body)
+ return r.json_body['access']['token']['id']
+
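+ # NOTE(editor): for reference, the token-plus-tenant request the helper
+ # above builds looks roughly like this (values are illustrative):
+ # {'auth': {'token': {'id': '<subject-token>'},
+ # 'tenantId': '<project-id>'}}
+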
+ def test_revoke_v2_token_no_check(self):
+ # Test that a V2 token can be revoked without validating it first.
+
+ token = self.get_v2_token()
+
+ self.delete('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=404)
+
+ def test_revoke_token_from_token(self):
+ # Test that a scoped token can be requested from an unscoped token,
+ # the scoped token can be revoked, and the unscoped token remains
+ # valid.
+
+ unscoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password']))
+
+ # Get a project-scoped token from the unscoped token
+ project_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ token=unscoped_token,
+ project_id=self.projectA['id']))
+
+ # Get a domain-scoped token from the unscoped token
+ domain_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ token=unscoped_token,
+ domain_id=self.domainA['id']))
+
+ # revoke the project-scoped token.
+ self.delete('/auth/tokens',
+ headers={'X-Subject-Token': project_scoped_token},
+ expected_status=204)
+
+ # The project-scoped token is invalidated.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': project_scoped_token},
+ expected_status=404)
+
+ # The unscoped token should still be valid.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=200)
+
+ # The domain-scoped token should still be valid.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': domain_scoped_token},
+ expected_status=200)
+
+ # revoke the domain-scoped token.
+ self.delete('/auth/tokens',
+ headers={'X-Subject-Token': domain_scoped_token},
+ expected_status=204)
+
+ # The domain-scoped token is invalid.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': domain_scoped_token},
+ expected_status=404)
+
+ # The unscoped token should still be valid.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=200)
+
+ def test_revoke_token_from_token_v2(self):
+ # Test that a scoped token can be requested from an unscoped token,
+ # the scoped token can be revoked, and the unscoped token remains
+ # valid.
+
+ # FIXME(blk-u): This isn't working correctly. The scoped token should
+ # be revoked. See bug 1347318.
+
+ unscoped_token = self.get_v2_token()
+
+ # Get a project-scoped token from the unscoped token
+ project_scoped_token = self.get_v2_token(
+ token=unscoped_token, project_id=self.default_domain_project['id'])
+
+ # revoke the project-scoped token.
+ self.delete('/auth/tokens',
+ headers={'X-Subject-Token': project_scoped_token},
+ expected_status=204)
+
+ # The project-scoped token is invalidated.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': project_scoped_token},
+ expected_status=404)
+
+ # The unscoped token should still be valid.
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=200)
+
+
+class TestTokenRevokeApi(TestTokenRevokeById):
+ """Test token revocation on the v3 Identity API."""
+
+ EXTENSION_NAME = 'revoke'
+ EXTENSION_TO_ADD = 'revoke_extension'
+
+ def config_overrides(self):
+ super(TestTokenRevokeApi, self).config_overrides()
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider',
+ revoke_by_id=False)
+
+ def assertValidDeletedProjectResponse(self, events_response, project_id):
+ events = events_response['events']
+ self.assertEqual(1, len(events))
+ self.assertEqual(project_id, events[0]['project_id'])
+ self.assertIsNotNone(events[0]['issued_before'])
+ self.assertIsNotNone(events_response['links'])
+ del events_response['events'][0]['issued_before']
+ del events_response['links']
+ expected_response = {'events': [{'project_id': project_id}]}
+ self.assertEqual(expected_response, events_response)
+
+ def assertDomainInList(self, events_response, domain_id):
+ events = events_response['events']
+ self.assertEqual(1, len(events))
+ self.assertEqual(domain_id, events[0]['domain_id'])
+ self.assertIsNotNone(events[0]['issued_before'])
+ self.assertIsNotNone(events_response['links'])
+ del events_response['events'][0]['issued_before']
+ del events_response['links']
+ expected_response = {'events': [{'domain_id': domain_id}]}
+ self.assertEqual(expected_response, events_response)
+
+ def assertValidRevokedTokenResponse(self, events_response, **kwargs):
+ events = events_response['events']
+ self.assertEqual(1, len(events))
+ for k, v in six.iteritems(kwargs):
+ self.assertEqual(v, events[0].get(k))
+ self.assertIsNotNone(events[0]['issued_before'])
+ self.assertIsNotNone(events_response['links'])
+ del events_response['events'][0]['issued_before']
+ del events_response['links']
+
+ expected_response = {'events': [kwargs]}
+ self.assertEqual(expected_response, events_response)
+
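+ # NOTE(editor): once the assertions above strip 'issued_before' and
+ # 'links', a single revocation event reduces to a bare attribute dict,
+ # e.g. (illustrative): {'events': [{'audit_id': '<audit-id>'}]}
+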
+ def test_revoke_token(self):
+ scoped_token = self.get_scoped_token()
+ headers = {'X-Subject-Token': scoped_token}
+ response = self.get('/auth/tokens', headers=headers,
+ expected_status=200).json_body['token']
+
+ self.delete('/auth/tokens', headers=headers, expected_status=204)
+ self.head('/auth/tokens', headers=headers, expected_status=404)
+ events_response = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body
+ self.assertValidRevokedTokenResponse(events_response,
+ audit_id=response['audit_ids'][0])
+
+ def test_revoke_v2_token(self):
+ token = self.get_v2_token()
+ headers = {'X-Subject-Token': token}
+ response = self.get('/auth/tokens', headers=headers,
+ expected_status=200).json_body['token']
+ self.delete('/auth/tokens', headers=headers, expected_status=204)
+ self.head('/auth/tokens', headers=headers, expected_status=404)
+ events_response = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body
+
+ self.assertValidRevokedTokenResponse(
+ events_response,
+ audit_id=response['audit_ids'][0])
+
+ def test_revoke_by_id_false_410(self):
+ self.get('/auth/tokens/OS-PKI/revoked', expected_status=410)
+
+ def test_list_delete_project_shows_in_event_list(self):
+ self.role_data_fixtures()
+ events = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body['events']
+ self.assertEqual([], events)
+ self.delete(
+ '/projects/%(project_id)s' % {'project_id': self.projectA['id']})
+ events_response = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body
+
+ self.assertValidDeletedProjectResponse(events_response,
+ self.projectA['id'])
+
+ def test_disable_domain_shows_in_event_list(self):
+ events = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body['events']
+ self.assertEqual([], events)
+ disable_body = {'domain': {'enabled': False}}
+ self.patch(
+ '/domains/%(domain_id)s' % {'domain_id': self.domainA['id']},
+ body=disable_body)
+
+ events_response = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body
+
+ self.assertDomainInList(events_response, self.domainA['id'])
+
+ def assertEventDataInList(self, events, **kwargs):
+ found = False
+ for e in events:
+ for key, value in six.iteritems(kwargs):
+ try:
+ if e[key] != value:
+ break
+ except KeyError:
+ # Break the loop and present a nice error instead of
+ # KeyError
+ break
+ else:
+ # If the value of the event[key] matches the value of the kwarg
+ # for each item in kwargs, the event was fully matched and
+ # the assertTrue below should succeed.
+ found = True
+ self.assertTrue(found,
+ 'event with correct values not in list, expected to '
+ 'find event with key-value pairs. Expected: '
+ '"%(expected)s" Events: "%(events)s"' %
+ {'expected': ','.join(
+ ["'%s=%s'" % (k, v) for k, v in six.iteritems(
+ kwargs)]),
+ 'events': events})
+
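+ # NOTE(editor): the matcher above leans on Python's for/else: the else
+ # branch runs only when the inner loop finishes without a break, i.e.
+ # when every key-value pair in kwargs matched the event. A standalone
+ # illustration of the construct (names are illustrative):
+ # for key, value in kwargs.items():
+ # if event.get(key) != value:
+ # break
+ # else:
+ # found = True
+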
+ def test_list_delete_token_shows_in_event_list(self):
+ self.role_data_fixtures()
+ events = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body['events']
+ self.assertEqual([], events)
+
+ scoped_token = self.get_scoped_token()
+ headers = {'X-Subject-Token': scoped_token}
+ auth_req = self.build_authentication_request(token=scoped_token)
+ response = self.v3_authenticate_token(auth_req)
+ token2 = response.json_body['token']
+ headers2 = {'X-Subject-Token': response.headers['X-Subject-Token']}
+
+ response = self.v3_authenticate_token(auth_req)
+ headers3 = {'X-Subject-Token': response.headers['X-Subject-Token']}
+
+ self.head('/auth/tokens', headers=headers, expected_status=200)
+ self.head('/auth/tokens', headers=headers2, expected_status=200)
+ self.head('/auth/tokens', headers=headers3, expected_status=200)
+
+ self.delete('/auth/tokens', headers=headers, expected_status=204)
+ # NOTE(ayoung): not deleting token3, as it should be covered by the
+ # previous delete
+ events_response = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body
+ events = events_response['events']
+ self.assertEqual(1, len(events))
+ self.assertEventDataInList(
+ events,
+ audit_id=token2['audit_ids'][1])
+ self.head('/auth/tokens', headers=headers, expected_status=404)
+ self.head('/auth/tokens', headers=headers2, expected_status=200)
+ self.head('/auth/tokens', headers=headers3, expected_status=200)
+
+ def test_list_with_filter(self):
+ self.role_data_fixtures()
+ events = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body['events']
+ self.assertEqual(0, len(events))
+
+ scoped_token = self.get_scoped_token()
+ headers = {'X-Subject-Token': scoped_token}
+ auth = self.build_authentication_request(token=scoped_token)
+ headers2 = {'X-Subject-Token': self.get_requested_token(auth)}
+ self.delete('/auth/tokens', headers=headers, expected_status=204)
+ self.delete('/auth/tokens', headers=headers2, expected_status=204)
+
+ events = self.get('/OS-REVOKE/events',
+ expected_status=200).json_body['events']
+
+ self.assertEqual(2, len(events))
+ future = timeutils.isotime(timeutils.utcnow() +
+ datetime.timedelta(seconds=1000))
+
+ events = self.get('/OS-REVOKE/events?since=%s' % (future),
+ expected_status=200).json_body['events']
+ self.assertEqual(0, len(events))
+
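+ # NOTE(editor): the 'since' filter takes an ISO 8601 timestamp such as
+ # the one timeutils.isotime() produces, so the request above resembles
+ # (illustrative): /OS-REVOKE/events?since=2015-06-30T18:47:29Z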
+
+class TestAuthExternalDisabled(test_v3.RestfulTestCase):
+ def config_overrides(self):
+ super(TestAuthExternalDisabled, self).config_overrides()
+ self.config_fixture.config(
+ group='auth',
+ methods=['password', 'token'])
+
+ def test_remote_user_disabled(self):
+ api = auth.controllers.Auth()
+ remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ context, auth_info, auth_context = self.build_external_auth_request(
+ remote_user)
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+
+class TestAuthExternalLegacyDefaultDomain(test_v3.RestfulTestCase):
+ content_type = 'json'
+
+ def config_overrides(self):
+ super(TestAuthExternalLegacyDefaultDomain, self).config_overrides()
+ self.auth_plugin_config_override(
+ methods=['external', 'password', 'token'],
+ external='keystone.auth.plugins.external.LegacyDefaultDomain',
+ password='keystone.auth.plugins.password.Password',
+ token='keystone.auth.plugins.token.Token')
+
+ def test_remote_user_no_realm(self):
+ self.config_fixture.config(group='auth', methods='external')
+ api = auth.controllers.Auth()
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.default_domain_user['name'])
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'],
+ self.default_domain_user['id'])
+
+ def test_remote_user_no_domain(self):
+ api = auth.controllers.Auth()
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.user['name'])
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+
+class TestAuthExternalLegacyDomain(test_v3.RestfulTestCase):
+ content_type = 'json'
+
+ def config_overrides(self):
+ super(TestAuthExternalLegacyDomain, self).config_overrides()
+ self.auth_plugin_config_override(
+ methods=['external', 'password', 'token'],
+ external='keystone.auth.plugins.external.LegacyDomain',
+ password='keystone.auth.plugins.password.Password',
+ token='keystone.auth.plugins.token.Token')
+
+ def test_remote_user_with_realm(self):
+ api = auth.controllers.Auth()
+ remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ context, auth_info, auth_context = self.build_external_auth_request(
+ remote_user)
+
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'], self.user['id'])
+
+ # Now test to make sure the user name can, itself, contain the
+ # '@' character.
+ user = {'name': 'myname@mydivision'}
+ self.identity_api.update_user(self.user['id'], user)
+ remote_user = '%s@%s' % (user['name'], self.domain['name'])
+ context, auth_info, auth_context = self.build_external_auth_request(
+ remote_user)
+
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'], self.user['id'])
+
+ def test_project_id_scoped_with_remote_user(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'])
+ remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidProjectScopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'], self.user['name'])
+
+ def test_unscoped_bind_with_remote_user(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ auth_data = self.build_authentication_request()
+ remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidUnscopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'], self.user['name'])
+
+
+class TestAuthExternalDomain(test_v3.RestfulTestCase):
+ content_type = 'json'
+
+ def config_overrides(self):
+ super(TestAuthExternalDomain, self).config_overrides()
+ self.kerberos = False
+ self.auth_plugin_config_override(
+ methods=['external', 'password', 'token'],
+ external='keystone.auth.plugins.external.Domain',
+ password='keystone.auth.plugins.password.Password',
+ token='keystone.auth.plugins.token.Token')
+
+ def test_remote_user_with_realm(self):
+ api = auth.controllers.Auth()
+ remote_user = self.user['name']
+ remote_domain = self.domain['name']
+ context, auth_info, auth_context = self.build_external_auth_request(
+ remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
+
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'], self.user['id'])
+
+ # Now test to make sure the user name can, itself, contain the
+ # '@' character.
+ user = {'name': 'myname@mydivision'}
+ self.identity_api.update_user(self.user['id'], user)
+ remote_user = user['name']
+ context, auth_info, auth_context = self.build_external_auth_request(
+ remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
+
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'], self.user['id'])
+
+ def test_project_id_scoped_with_remote_user(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'],
+ kerberos=self.kerberos)
+ remote_user = self.user['name']
+ remote_domain = self.domain['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'REMOTE_DOMAIN': remote_domain,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidProjectScopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'], self.user['name'])
+
+ def test_unscoped_bind_with_remote_user(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ auth_data = self.build_authentication_request(kerberos=self.kerberos)
+ remote_user = self.user['name']
+ remote_domain = self.domain['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'REMOTE_DOMAIN': remote_domain,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidUnscopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'], self.user['name'])
+
+
+class TestAuthKerberos(TestAuthExternalDomain):
+
+ def config_overrides(self):
+ super(TestAuthKerberos, self).config_overrides()
+ self.kerberos = True
+ self.auth_plugin_config_override(
+ methods=['kerberos', 'password', 'token'],
+ kerberos='keystone.auth.plugins.external.KerberosDomain',
+ password='keystone.auth.plugins.password.Password',
+ token='keystone.auth.plugins.token.Token')
+
+
+class TestAuth(test_v3.RestfulTestCase):
+
+ def test_unscoped_token_with_user_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_unscoped_token_with_user_domain_id(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_unscoped_token_with_user_domain_name(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_project_id_scoped_token_with_user_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def _second_project_as_default(self):
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # grant the user a role on the project
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'user_id': self.user['id'],
+ 'project_id': project['id'],
+ 'role_id': self.role['id']})
+
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ return project
+
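+ # NOTE(editor): with 'default_project_id' set, an authentication request
+ # that names no scope is implicitly scoped to that project; the tests
+ # that follow exercise exactly that fallback.
+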
+ def test_default_project_id_scoped_token_with_user_id(self):
+ project = self._second_project_as_default()
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_default_project_id_scoped_token_with_user_id_no_catalog(self):
+ project = self._second_project_as_default()
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True)
+ self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_explicit_unscoped_token(self):
+ self._second_project_as_default()
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ unscoped="unscoped")
+ r = self.post('/auth/tokens', body=auth_data, noauth=True)
+
+ self.assertIsNone(r.result['token'].get('project'))
+ self.assertIsNone(r.result['token'].get('domain'))
+ self.assertIsNone(r.result['token'].get('scope'))
+
+ def test_implicit_project_id_scoped_token_with_user_id_no_catalog(self):
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True)
+ self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+
+ def test_auth_catalog_attributes(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.v3_authenticate_token(auth_data)
+
+ catalog = r.result['token']['catalog']
+ self.assertEqual(1, len(catalog))
+ catalog = catalog[0]
+
+ self.assertEqual(self.service['id'], catalog['id'])
+ self.assertEqual(self.service['name'], catalog['name'])
+ self.assertEqual(self.service['type'], catalog['type'])
+
+ endpoint = catalog['endpoints']
+ self.assertEqual(1, len(endpoint))
+ endpoint = endpoint[0]
+
+ self.assertEqual(self.endpoint['id'], endpoint['id'])
+ self.assertEqual(self.endpoint['interface'], endpoint['interface'])
+ self.assertEqual(self.endpoint['region_id'], endpoint['region_id'])
+ self.assertEqual(self.endpoint['url'], endpoint['url'])
+
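+ # NOTE(editor): the catalog validated above nests endpoints inside each
+ # service entry, roughly (illustrative shape, not a literal response):
+ # [{'id': ..., 'name': ..., 'type': ...,
+ # 'endpoints': [{'id': ..., 'interface': ...,
+ # 'region_id': ..., 'url': ...}]}]
+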
+ def _check_disabled_endpoint_result(self, catalog, disabled_endpoint_id):
+ endpoints = catalog[0]['endpoints']
+ endpoint_ids = [ep['id'] for ep in endpoints]
+ self.assertEqual([self.endpoint_id], endpoint_ids)
+
+ def test_auth_catalog_disabled_service(self):
+ """On authenticate, get a catalog that excludes disabled services."""
+ # although the child endpoint is enabled, the service is disabled
+ self.assertTrue(self.endpoint['enabled'])
+ self.catalog_api.update_service(
+ self.endpoint['service_id'], {'enabled': False})
+ service = self.catalog_api.get_service(self.endpoint['service_id'])
+ self.assertFalse(service['enabled'])
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.v3_authenticate_token(auth_data)
+
+ self.assertEqual([], r.result['token']['catalog'])
+
+ def test_auth_catalog_disabled_endpoint(self):
+ """On authenticate, get a catalog that excludes disabled endpoints."""
+
+ # Create a disabled endpoint that's like the enabled one.
+ disabled_endpoint_ref = copy.copy(self.endpoint)
+ disabled_endpoint_id = uuid.uuid4().hex
+ disabled_endpoint_ref.update({
+ 'id': disabled_endpoint_id,
+ 'enabled': False,
+ 'interface': 'internal'
+ })
+ self.catalog_api.create_endpoint(disabled_endpoint_id,
+ disabled_endpoint_ref)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.v3_authenticate_token(auth_data)
+
+ self._check_disabled_endpoint_result(r.result['token']['catalog'],
+ disabled_endpoint_id)
+
+ def test_project_id_scoped_token_with_user_id_401(self):
+ project = self.new_project_ref(domain_id=self.domain_id)
+ self.resource_api.create_project(project['id'], project)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=project['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_user_and_group_roles_scoped_token(self):
+ """Test correct roles are returned in scoped token.
+
+ Test Plan:
+
+ - Create a domain, with 1 project, 2 users (user1 and user2)
+ and 2 groups (group1 and group2)
+ - Make user1 a member of group1, user2 a member of group2
+ - Create 8 roles, assigning them to each of the 8 combinations
+ of users/groups on domain/project
+ - Get a project scoped token for user1, checking that the right
+ two roles are returned (one directly assigned, one by virtue
+ of group membership)
+ - Repeat this for a domain scoped token
+ - Make user1 also a member of group2
+ - Get another scoped token making sure the additional role
+ shows up
+ - User2 is just here as a spoiler, to make sure we don't get
+ any roles uniquely assigned to it returned in any of our
+ tokens
+
+ """
+
+ domainA = self.new_domain_ref()
+ self.resource_api.create_domain(domainA['id'], domainA)
+ projectA = self.new_project_ref(domain_id=domainA['id'])
+ self.resource_api.create_project(projectA['id'], projectA)
+
+ user1 = self.new_user_ref(
+ domain_id=domainA['id'])
+ password = user1['password']
+ user1 = self.identity_api.create_user(user1)
+ user1['password'] = password
+
+ user2 = self.new_user_ref(
+ domain_id=domainA['id'])
+ password = user2['password']
+ user2 = self.identity_api.create_user(user2)
+ user2['password'] = password
+
+ group1 = self.new_group_ref(
+ domain_id=domainA['id'])
+ group1 = self.identity_api.create_group(group1)
+
+ group2 = self.new_group_ref(
+ domain_id=domainA['id'])
+ group2 = self.identity_api.create_group(group2)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user2['id'],
+ group2['id'])
+
+ # Now create all the roles and assign them
+ role_list = []
+ for _ in range(8):
+ role = self.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ self.assignment_api.create_grant(role_list[0]['id'],
+ user_id=user1['id'],
+ domain_id=domainA['id'])
+ self.assignment_api.create_grant(role_list[1]['id'],
+ user_id=user1['id'],
+ project_id=projectA['id'])
+ self.assignment_api.create_grant(role_list[2]['id'],
+ user_id=user2['id'],
+ domain_id=domainA['id'])
+ self.assignment_api.create_grant(role_list[3]['id'],
+ user_id=user2['id'],
+ project_id=projectA['id'])
+ self.assignment_api.create_grant(role_list[4]['id'],
+ group_id=group1['id'],
+ domain_id=domainA['id'])
+ self.assignment_api.create_grant(role_list[5]['id'],
+ group_id=group1['id'],
+ project_id=projectA['id'])
+ self.assignment_api.create_grant(role_list[6]['id'],
+ group_id=group2['id'],
+ domain_id=domainA['id'])
+ self.assignment_api.create_grant(role_list[7]['id'],
+ group_id=group2['id'],
+ project_id=projectA['id'])
+
+ # First, get a project scoped token - which should
+ # contain the direct user role and the one by virtue
+ # of group membership
+ auth_data = self.build_authentication_request(
+ user_id=user1['id'],
+ password=user1['password'],
+ project_id=projectA['id'])
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidScopedTokenResponse(r)
+ roles_ids = []
+ for ref in token['roles']:
+ roles_ids.append(ref['id'])
+ self.assertEqual(2, len(token['roles']))
+ self.assertIn(role_list[1]['id'], roles_ids)
+ self.assertIn(role_list[5]['id'], roles_ids)
+
+ # Now the same thing for a domain scoped token
+ auth_data = self.build_authentication_request(
+ user_id=user1['id'],
+ password=user1['password'],
+ domain_id=domainA['id'])
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidScopedTokenResponse(r)
+ roles_ids = []
+ for ref in token['roles']:
+ roles_ids.append(ref['id'])
+ self.assertEqual(2, len(token['roles']))
+ self.assertIn(role_list[0]['id'], roles_ids)
+ self.assertIn(role_list[4]['id'], roles_ids)
+
+ # Finally, add user1 to the 2nd group, and get a new
+ # scoped token - the extra role should now be included
+ # by virtue of the 2nd group
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+ auth_data = self.build_authentication_request(
+ user_id=user1['id'],
+ password=user1['password'],
+ project_id=projectA['id'])
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidScopedTokenResponse(r)
+ roles_ids = []
+ for ref in token['roles']:
+ roles_ids.append(ref['id'])
+ self.assertEqual(3, len(token['roles']))
+ self.assertIn(role_list[1]['id'], roles_ids)
+ self.assertIn(role_list[5]['id'], roles_ids)
+ self.assertIn(role_list[7]['id'], roles_ids)
+
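+ # NOTE(editor): the role-id extraction repeated three times above is a
+ # natural list comprehension; a minimal sketch (helper name is
+ # hypothetical, not part of this suite):
+ def _role_ids(self, token):
+ return [ref['id'] for ref in token['roles']]
+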
+ def test_auth_token_cross_domain_group_and_project(self):
+ """Verify getting a token in cross domain group/project roles."""
+ # create domain, project and group and grant roles to user
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.resource_api.create_project(project1['id'], project1)
+ user_foo = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ password = user_foo['password']
+ user_foo = self.identity_api.create_user(user_foo)
+ user_foo['password'] = password
+ role_member = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_member['id'], role_member)
+ role_admin = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_admin['id'], role_admin)
+ role_foo_domain1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_foo_domain1['id'], role_foo_domain1)
+ role_group_domain1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.role_api.create_role(role_group_domain1['id'], role_group_domain1)
+ self.assignment_api.add_user_to_project(project1['id'],
+ user_foo['id'])
+ new_group = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ self.identity_api.add_user_to_group(user_foo['id'],
+ new_group['id'])
+ self.assignment_api.create_grant(
+ user_id=user_foo['id'],
+ project_id=project1['id'],
+ role_id=role_member['id'])
+ self.assignment_api.create_grant(
+ group_id=new_group['id'],
+ project_id=project1['id'],
+ role_id=role_admin['id'])
+ self.assignment_api.create_grant(
+ user_id=user_foo['id'],
+ domain_id=domain1['id'],
+ role_id=role_foo_domain1['id'])
+ self.assignment_api.create_grant(
+ group_id=new_group['id'],
+ domain_id=domain1['id'],
+ role_id=role_group_domain1['id'])
+
+ # Get a scoped token for the project
+ auth_data = self.build_authentication_request(
+ username=user_foo['name'],
+ user_domain_id=test_v3.DEFAULT_DOMAIN_ID,
+ password=user_foo['password'],
+ project_name=project1['name'],
+ project_domain_id=domain1['id'])
+
+ r = self.v3_authenticate_token(auth_data)
+ scoped_token = self.assertValidScopedTokenResponse(r)
+ project = scoped_token["project"]
+ roles_ids = []
+ for ref in scoped_token['roles']:
+ roles_ids.append(ref['id'])
+ self.assertEqual(project1['id'], project["id"])
+ self.assertIn(role_member['id'], roles_ids)
+ self.assertIn(role_admin['id'], roles_ids)
+ self.assertNotIn(role_foo_domain1['id'], roles_ids)
+ self.assertNotIn(role_group_domain1['id'], roles_ids)
+
+ def test_project_id_scoped_token_with_user_domain_id(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def test_project_id_scoped_token_with_user_domain_name(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def test_domain_id_scoped_token_with_user_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_id_scoped_token_with_user_domain_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_id_scoped_token_with_user_domain_name(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_name_scoped_token_with_user_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_name_scoped_token_with_user_domain_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_name_scoped_token_with_user_domain_name(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_scope_token_with_group_role(self):
+ group = self.new_group_ref(
+ domain_id=self.domain_id)
+ group = self.identity_api.create_group(group)
+
+ # add user to group
+ self.identity_api.add_user_to_group(self.user['id'], group['id'])
+
+ # grant the domain role to group
+ path = '/domains/%s/groups/%s/roles/%s' % (
+ self.domain['id'], group['id'], self.role['id'])
+ self.put(path=path)
+
+ # now get a domain-scoped token
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_scope_token_with_name(self):
+ # grant the domain role to user
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+ # now get a domain-scoped token
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_scope_failed(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_auth_with_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ token = r.headers.get('X-Subject-Token')
+
+ # test token auth
+ auth_data = self.build_authentication_request(token=token)
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def get_v2_token(self, tenant_id=None):
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.default_domain_user['name'],
+ 'password': self.default_domain_user['password'],
+ },
+ },
+ }
+ # scope the request when a tenant is given; otherwise the
+ # tenant_id argument would be silently ignored
+ if tenant_id:
+ body['auth']['tenantId'] = tenant_id
+ r = self.admin_request(method='POST', path='/v2.0/tokens', body=body)
+ return r
+
+ def test_validate_v2_unscoped_token_with_v3_api(self):
+ v2_token = self.get_v2_token().result['access']['token']['id']
+ auth_data = self.build_authentication_request(token=v2_token)
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_validate_v2_scoped_token_with_v3_api(self):
+ v2_response = self.get_v2_token(
+ tenant_id=self.default_domain_project['id'])
+ result = v2_response.result
+ v2_token = result['access']['token']['id']
+ auth_data = self.build_authentication_request(
+ token=v2_token,
+ project_id=self.default_domain_project['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidScopedTokenResponse(r)
+
+ def test_invalid_user_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=uuid.uuid4().hex,
+ password=self.user['password'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_invalid_user_name(self):
+ auth_data = self.build_authentication_request(
+ username=uuid.uuid4().hex,
+ user_domain_id=self.domain['id'],
+ password=self.user['password'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_invalid_domain_id(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=uuid.uuid4().hex,
+ password=self.user['password'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_invalid_domain_name(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=uuid.uuid4().hex,
+ password=self.user['password'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_invalid_password(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=uuid.uuid4().hex)
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_remote_user_no_realm(self):
+ self.config_fixture.config(group='auth', methods='external')
+ api = auth.controllers.Auth()
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.default_domain_user['name'])
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'],
+ self.default_domain_user['id'])
+ # Now test to make sure the user name can, itself, contain the
+ # '@' character.
+ user = {'name': 'myname@mydivision'}
+ self.identity_api.update_user(self.default_domain_user['id'], user)
+ context, auth_info, auth_context = self.build_external_auth_request(
+ user["name"])
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'],
+ self.default_domain_user['id'])
+
+ def test_remote_user_no_domain(self):
+ api = auth.controllers.Auth()
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.user['name'])
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+ def test_remote_user_and_password(self):
+ # both REMOTE_USER and password methods must pass.
+ # note that they do not have to match
+ api = auth.controllers.Auth()
+ auth_data = self.build_authentication_request(
+ user_domain_id=self.default_domain_user['domain_id'],
+ username=self.default_domain_user['name'],
+ password=self.default_domain_user['password'])['auth']
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.default_domain_user['name'], auth_data=auth_data)
+
+ api.authenticate(context, auth_info, auth_context)
+
+ def test_remote_user_and_explicit_external(self):
+ # both REMOTE_USER and password methods must pass.
+ # note that they do not have to match
+ auth_data = self.build_authentication_request(
+ user_domain_id=self.domain['id'],
+ username=self.user['name'],
+ password=self.user['password'])['auth']
+ auth_data['identity']['methods'] = ["password", "external"]
+ auth_data['identity']['external'] = {}
+ api = auth.controllers.Auth()
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ self.empty_context,
+ auth_info,
+ auth_context)
+
+ def test_remote_user_bad_password(self):
+ # both REMOTE_USER and password methods must pass.
+ api = auth.controllers.Auth()
+ auth_data = self.build_authentication_request(
+ user_domain_id=self.domain['id'],
+ username=self.user['name'],
+ password='badpassword')['auth']
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.default_domain_user['name'], auth_data=auth_data)
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+ def test_bind_not_set_with_remote_user(self):
+ self.config_fixture.config(group='token', bind=[])
+ auth_data = self.build_authentication_request()
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidUnscopedTokenResponse(r)
+ self.assertNotIn('bind', token)
+
+ # TODO(ayoung): move to TestPKITokenAPIs; it will be run for both formats
+ def test_verify_with_bound_token(self):
+ self.config_fixture.config(group='token', bind='kerberos')
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'])
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+
+ token = self.get_requested_token(auth_data)
+ headers = {'X-Subject-Token': token}
+ r = self.get('/auth/tokens', headers=headers, token=token)
+ token = self.assertValidProjectScopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'],
+ self.default_domain_user['name'])
+
+ def test_auth_with_bind_token(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+
+ auth_data = self.build_authentication_request()
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.v3_authenticate_token(auth_data)
+
+ # the unscoped token should have bind information in it
+ token = self.assertValidUnscopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'], remote_user)
+
+ token = r.headers.get('X-Subject-Token')
+
+ # using unscoped token with remote user succeeds
+ auth_params = {'token': token, 'project_id': self.project_id}
+ auth_data = self.build_authentication_request(**auth_params)
+ r = self.v3_authenticate_token(auth_data)
+ token = self.assertValidProjectScopedTokenResponse(r)
+
+ # the bind information should be carried over from the original token
+ self.assertEqual(token['bind']['kerberos'], remote_user)
+
+ def test_v2_v3_bind_token_intermix(self):
+ self.config_fixture.config(group='token', bind='kerberos')
+
+ # we need our own user registered to the default domain because of
+ # the way external auth works.
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ body = {'auth': {}}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+
+ v2_token_data = resp.result
+
+ bind = v2_token_data['access']['token']['bind']
+ self.assertEqual(bind['kerberos'], self.default_domain_user['name'])
+
+ v2_token_id = v2_token_data['access']['token']['id']
+ # NOTE(gyee): self.get() will try to obtain an auth token if one
+ # is not provided. When REMOTE_USER is present in the request
+ # environment, the external user auth plugin is used in conjunction
+ # with the password auth for the admin user. Therefore, we need to
+ # clean up the REMOTE_USER information from the previous call.
+ del self.admin_app.extra_environ['REMOTE_USER']
+ headers = {'X-Subject-Token': v2_token_id}
+ resp = self.get('/auth/tokens', headers=headers)
+ token_data = resp.result
+
+ self.assertDictEqual(v2_token_data['access']['token']['bind'],
+ token_data['token']['bind'])
+
+ def test_authenticating_a_user_with_no_password(self):
+ user = self.new_user_ref(domain_id=self.domain['id'])
+ user.pop('password', None) # can't have a password for this test
+ user = self.identity_api.create_user(user)
+
+ auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password='password')
+
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_disabled_default_project_result_in_unscoped_token(self):
+ # create a disabled project to work with
+ project = self.create_new_default_project_for_user(
+ self.user['id'], self.domain_id, enable_project=False)
+
+ # assign a role to user for the new project
+ self.assignment_api.add_role_to_user_and_project(self.user['id'],
+ project['id'],
+ self.role_id)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_disabled_default_project_domain_result_in_unscoped_token(self):
+ domain_ref = self.new_domain_ref()
+ r = self.post('/domains', body={'domain': domain_ref})
+ domain = self.assertValidDomainResponse(r, domain_ref)
+
+ project = self.create_new_default_project_for_user(
+ self.user['id'], domain['id'])
+
+ # assign a role to user for the new project
+ self.assignment_api.add_role_to_user_and_project(self.user['id'],
+ project['id'],
+ self.role_id)
+
+ # now disable the project domain
+ body = {'domain': {'enabled': False}}
+ r = self.patch('/domains/%(domain_id)s' % {'domain_id': domain['id']},
+ body=body)
+ self.assertValidDomainResponse(r)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_no_access_to_default_project_result_in_unscoped_token(self):
+        # create a new default project for the user (no role is assigned)
+ self.create_new_default_project_for_user(self.user['id'],
+ self.domain_id)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_disabled_scope_project_domain_result_in_401(self):
+ # create a disabled domain
+ domain = self.new_domain_ref()
+ domain['enabled'] = False
+ self.resource_api.create_domain(domain['id'], domain)
+
+ # create a project in the disabled domain
+ project = self.new_project_ref(domain_id=domain['id'])
+ self.resource_api.create_project(project['id'], project)
+
+ # assign some role to self.user for the project in the disabled domain
+ self.assignment_api.add_role_to_user_and_project(
+ self.user['id'],
+ project['id'],
+ self.role_id)
+
+ # user should not be able to auth with project_id
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=project['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ # user should not be able to auth with project_name & domain
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_name=project['name'],
+ project_domain_id=domain['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_auth_methods_with_different_identities_fails(self):
+ # get the token for a user. This is self.user which is different from
+ # self.default_domain_user.
+ token = self.get_scoped_token()
+ # try both password and token methods with different identities and it
+ # should fail
+ auth_data = self.build_authentication_request(
+ token=token,
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+
+class TestAuthJSONExternal(test_v3.RestfulTestCase):
+ content_type = 'json'
+
+ def config_overrides(self):
+ super(TestAuthJSONExternal, self).config_overrides()
+ self.config_fixture.config(group='auth', methods='')
+
+ def auth_plugin_config_override(self, methods=None, **method_classes):
+ self.config_fixture.config(group='auth', methods='')
+
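+    # With every auth method disabled, REMOTE_USER alone must not
+    # authenticate: the 'external' plugin is unavailable, so
+    # Auth.authenticate() raises Unauthorized.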
+ def test_remote_user_no_method(self):
+ api = auth.controllers.Auth()
+ context, auth_info, auth_context = self.build_external_auth_request(
+ self.default_domain_user['name'])
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+
+class TestTrustOptional(test_v3.RestfulTestCase):
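+    # With the trust extension disabled, the trust endpoints return 404 and
+    # trust-scoped authentication is forbidden (403).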
+ def config_overrides(self):
+ super(TestTrustOptional, self).config_overrides()
+ self.config_fixture.config(group='trust', enabled=False)
+
+ def test_trusts_404(self):
+ self.get('/OS-TRUST/trusts', body={'trust': {}}, expected_status=404)
+ self.post('/OS-TRUST/trusts', body={'trust': {}}, expected_status=404)
+
+ def test_auth_with_scope_in_trust_403(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ trust_id=uuid.uuid4().hex)
+ self.v3_authenticate_token(auth_data, expected_status=403)
+
+
+class TestTrustRedelegation(test_v3.RestfulTestCase):
+ """Redelegation valid and secure
+
+ Redelegation is a hierarchical structure of trusts between initial trustor
+ and a group of users allowed to impersonate trustor and act in his name.
+ Hierarchy is created in a process of trusting already trusted permissions
+ and organized as an adjacency list using 'redelegated_trust_id' field.
+ Redelegation is valid if each subsequent trust in a chain passes 'not more'
+ permissions than being redelegated.
+
+ Trust constraints are:
+ * roles - set of roles trusted by trustor
+ * expiration_time
+ * allow_redelegation - a flag
+ * redelegation_count - decreasing value restricting length of trust chain
+ * remaining_uses - DISALLOWED when allow_redelegation == True
+
+    A trust becomes invalid when:
+    * the trusted roles are revoked from the trustor
+    * one of the users in the delegation chain is disabled or deleted
+    * its expiration time has passed
+    * one of the parent trusts becomes invalid
+    * one of the parent trusts is deleted
+
+ """
+
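+    # Illustrative shape of a two-link chain (descriptive sketch only; the
+    # field names follow the trust API exercised in the tests below):
+    #
+    #   parent: {'id': T1, 'trustor_user_id': A, 'trustee_user_id': B,
+    #            'redelegation_count': 2, 'roles': [r1, r2]}
+    #   child:  {'id': T2, 'trustor_user_id': B, 'trustee_user_id': C,
+    #            'redelegated_trust_id': T1, 'redelegation_count': 1,
+    #            'roles': [r1]}
+    #
+    # The child must carry a smaller redelegation_count, a subset of the
+    # parent's roles and an expiry no later than the parent's.
+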
+ def config_overrides(self):
+ super(TestTrustRedelegation, self).config_overrides()
+ self.config_fixture.config(
+ group='trust',
+ enabled=True,
+ allow_redelegation=True,
+ max_redelegation_count=10
+ )
+
+ def setUp(self):
+ super(TestTrustRedelegation, self).setUp()
+ # Create a trustee to delegate stuff to
+ trustee_user_ref = self.new_user_ref(domain_id=self.domain_id)
+ self.trustee_user = self.identity_api.create_user(trustee_user_ref)
+ self.trustee_user['password'] = trustee_user_ref['password']
+
+ # trustor->trustee
+ self.redelegated_trust_ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id],
+ allow_redelegation=True)
+
+        # trustor->trustee (no explicit expiry)
+ self.chained_trust_ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ role_ids=[self.role_id],
+ allow_redelegation=True)
+
+ def _get_trust_token(self, trust):
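+        # Authenticate as the trustee against the given trust and return
+        # the resulting trust-scoped token.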
+ trust_id = trust['id']
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust_id)
+ trust_token = self.get_requested_token(auth_data)
+ return trust_token
+
+ def test_depleted_redelegation_count_error(self):
+ self.redelegated_trust_ref['redelegation_count'] = 0
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_token = self._get_trust_token(trust)
+
+ # Attempt to create a redelegated trust.
+ self.post('/OS-TRUST/trusts',
+ body={'trust': self.chained_trust_ref},
+ token=trust_token,
+ expected_status=403)
+
+ def test_modified_redelegation_count_error(self):
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_token = self._get_trust_token(trust)
+
+ # Attempt to create a redelegated trust with incorrect
+ # redelegation_count.
+ correct = trust['redelegation_count'] - 1
+ incorrect = correct - 1
+ self.chained_trust_ref['redelegation_count'] = incorrect
+ self.post('/OS-TRUST/trusts',
+ body={'trust': self.chained_trust_ref},
+ token=trust_token,
+ expected_status=403)
+
+ def test_max_redelegation_count_constraint(self):
+ incorrect = CONF.trust.max_redelegation_count + 1
+ self.redelegated_trust_ref['redelegation_count'] = incorrect
+ self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref},
+ expected_status=403)
+
+ def test_redelegation_expiry(self):
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_token = self._get_trust_token(trust)
+
+        # Attempt to create a redelegated trust meant to outlive the parent
+        # trust: give it 10 minutes versus the parent's 1 minute.
+ too_long_live_chained_trust_ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=10),
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts',
+ body={'trust': too_long_live_chained_trust_ref},
+ token=trust_token,
+ expected_status=403)
+
+ def test_redelegation_remaining_uses(self):
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_token = self._get_trust_token(trust)
+
+        # Attempt to create a redelegated trust with remaining_uses defined.
+        # According to the specification this must fail: remaining_uses must
+        # be omitted when a trust is redelegated. The specific value here is
+        # irrelevant.
+ self.chained_trust_ref['remaining_uses'] = 5
+ self.post('/OS-TRUST/trusts',
+ body={'trust': self.chained_trust_ref},
+ token=trust_token,
+ expected_status=403)
+
+ def test_roles_subset(self):
+ # Build second role
+ role = self.new_role_ref()
+ self.assignment_api.create_role(role['id'], role)
+ # assign a new role to the user
+ self.assignment_api.create_grant(role_id=role['id'],
+ user_id=self.user_id,
+ project_id=self.project_id)
+
+ # Create first trust with extended set of roles
+ ref = self.redelegated_trust_ref
+ ref['roles'].append({'id': role['id']})
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+ # Trust created with exact set of roles (checked by role id)
+ role_id_set = set(r['id'] for r in ref['roles'])
+ trust_role_id_set = set(r['id'] for r in trust['roles'])
+ self.assertEqual(role_id_set, trust_role_id_set)
+
+ trust_token = self._get_trust_token(trust)
+
+ # Chain second trust with roles subset
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.chained_trust_ref},
+ token=trust_token)
+ trust2 = self.assertValidTrustResponse(r)
+ # First trust contains roles superset
+ # Second trust contains roles subset
+ role_id_set1 = set(r['id'] for r in trust['roles'])
+ role_id_set2 = set(r['id'] for r in trust2['roles'])
+ self.assertThat(role_id_set1, matchers.GreaterThan(role_id_set2))
+
+ def test_redelegate_with_role_by_name(self):
+        # Test trusts whose roles are specified by name rather than by ID
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_names=[self.role['name']],
+ allow_redelegation=True)
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+ # Ensure we can get a token with this trust
+ trust_token = self._get_trust_token(trust)
+        # Chain a second trust with the same role, again specified by name
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ role_names=[self.role['name']],
+ allow_redelegation=True)
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ token=trust_token)
+ trust = self.assertValidTrustResponse(r)
+ # Ensure we can get a token with this trust
+ self._get_trust_token(trust)
+
+ def test_redelegate_new_role_fails(self):
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_token = self._get_trust_token(trust)
+
+ # Build second trust with a role not in parent's roles
+ role = self.new_role_ref()
+ self.assignment_api.create_role(role['id'], role)
+ # assign a new role to the user
+ self.assignment_api.create_grant(role_id=role['id'],
+ user_id=self.user_id,
+ project_id=self.project_id)
+
+        # Try to chain a trust with a role not present in the parent trust
+ self.chained_trust_ref['roles'] = [{'id': role['id']}]
+
+ # Bypass policy enforcement
+ with mock.patch.object(rules, 'enforce', return_value=True):
+ self.post('/OS-TRUST/trusts',
+ body={'trust': self.chained_trust_ref},
+ token=trust_token,
+ expected_status=403)
+
+ def test_redelegation_terminator(self):
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_token = self._get_trust_token(trust)
+
+ # Build second trust - the terminator
+ ref = dict(self.chained_trust_ref,
+ redelegation_count=1,
+ allow_redelegation=False)
+
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ token=trust_token)
+
+ trust = self.assertValidTrustResponse(r)
+        # Check that allow_redelegation == False forced redelegation_count
+        # to 0 and that allow_redelegation itself is dropped from the trust
+ self.assertNotIn('allow_redelegation', trust)
+ self.assertEqual(trust['redelegation_count'], 0)
+ trust_token = self._get_trust_token(trust)
+
+ # Build third trust, same as second
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ token=trust_token,
+ expected_status=403)
+
+
+class TestTrustChain(test_v3.RestfulTestCase):
+
+ def config_overrides(self):
+ super(TestTrustChain, self).config_overrides()
+ self.config_fixture.config(
+ group='trust',
+ enabled=True,
+ allow_redelegation=True,
+ max_redelegation_count=10
+ )
+
+ def setUp(self):
+ super(TestTrustChain, self).setUp()
+ # Create trust chain
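+        # Chain layout: self.user -> user_chain[0] -> user_chain[1] ->
+        # user_chain[2]; each link after the first is created with the
+        # previous link's trust token, i.e. it is redelegated.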
+ self.user_chain = list()
+ self.trust_chain = list()
+ for _ in xrange(3):
+ user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user = self.identity_api.create_user(user_ref)
+ user['password'] = user_ref['password']
+ self.user_chain.append(user)
+
+ # trustor->trustee
+ trustee = self.user_chain[0]
+ trust_ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=trustee['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ trust_ref.update(
+ allow_redelegation=True,
+ redelegation_count=3)
+
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': trust_ref})
+
+ trust = self.assertValidTrustResponse(r)
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+ trust_token = self.get_requested_token(auth_data)
+ self.trust_chain.append(trust)
+
+ for trustee in self.user_chain[1:]:
+ trust_ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=trustee['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ role_ids=[self.role_id])
+ trust_ref.update(
+ allow_redelegation=True)
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': trust_ref},
+ token=trust_token)
+ trust = self.assertValidTrustResponse(r)
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+ trust_token = self.get_requested_token(auth_data)
+ self.trust_chain.append(trust)
+
+ trustee = self.user_chain[-1]
+ trust = self.trust_chain[-1]
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+
+ self.last_token = self.get_requested_token(auth_data)
+
+ def assert_user_authenticate(self, user):
+ auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password']
+ )
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidTokenResponse(r)
+
+ def assert_trust_tokens_revoked(self, trust_id):
+ trustee = self.user_chain[0]
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password']
+ )
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidTokenResponse(r)
+
+ revocation_response = self.get('/OS-REVOKE/events')
+ revocation_events = revocation_response.json_body['events']
+ found = False
+ for event in revocation_events:
+ if event.get('OS-TRUST:trust_id') == trust_id:
+ found = True
+ self.assertTrue(found, 'event with trust_id %s not found in list' %
+ trust_id)
+
+ def test_delete_trust_cascade(self):
+ self.assert_user_authenticate(self.user_chain[0])
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': self.trust_chain[0]['id']},
+ expected_status=204)
+
+ headers = {'X-Subject-Token': self.last_token}
+ self.head('/auth/tokens', headers=headers, expected_status=404)
+ self.assert_trust_tokens_revoked(self.trust_chain[0]['id'])
+
+ def test_delete_broken_chain(self):
+ self.assert_user_authenticate(self.user_chain[0])
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': self.trust_chain[1]['id']},
+ expected_status=204)
+
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': self.trust_chain[0]['id']},
+ expected_status=204)
+
+ def test_trustor_roles_revoked(self):
+ self.assert_user_authenticate(self.user_chain[0])
+
+ self.assignment_api.remove_role_from_user_and_project(
+ self.user_id, self.project_id, self.role_id
+ )
+
+ auth_data = self.build_authentication_request(
+ token=self.last_token,
+ trust_id=self.trust_chain[-1]['id'])
+ self.v3_authenticate_token(auth_data, expected_status=404)
+
+ def test_intermediate_user_disabled(self):
+ self.assert_user_authenticate(self.user_chain[0])
+
+ disabled = self.user_chain[0]
+ disabled['enabled'] = False
+ self.identity_api.update_user(disabled['id'], disabled)
+
+ # Bypass policy enforcement
+ with mock.patch.object(rules, 'enforce', return_value=True):
+ headers = {'X-Subject-Token': self.last_token}
+ self.head('/auth/tokens', headers=headers, expected_status=403)
+
+ def test_intermediate_user_deleted(self):
+ self.assert_user_authenticate(self.user_chain[0])
+
+ self.identity_api.delete_user(self.user_chain[0]['id'])
+
+ # Bypass policy enforcement
+ with mock.patch.object(rules, 'enforce', return_value=True):
+ headers = {'X-Subject-Token': self.last_token}
+ self.head('/auth/tokens', headers=headers, expected_status=403)
+
+
+class TestTrustAuth(test_v3.RestfulTestCase):
+ EXTENSION_NAME = 'revoke'
+ EXTENSION_TO_ADD = 'revoke_extension'
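+
+    # The revoke extension is enabled so that deleting a trust emits
+    # revocation events; tests below read them from /OS-REVOKE/events.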
+
+ def config_overrides(self):
+ super(TestTrustAuth, self).config_overrides()
+ self.config_fixture.config(
+ group='revoke',
+ driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.pki.Provider',
+ revoke_by_id=False)
+ self.config_fixture.config(group='trust', enabled=True)
+
+ def setUp(self):
+ super(TestTrustAuth, self).setUp()
+
+ # create a trustee to delegate stuff to
+ self.trustee_user = self.new_user_ref(domain_id=self.domain_id)
+ password = self.trustee_user['password']
+ self.trustee_user = self.identity_api.create_user(self.trustee_user)
+ self.trustee_user['password'] = password
+ self.trustee_user_id = self.trustee_user['id']
+
+    def test_create_trust_bad_request(self):
+ # The server returns a 403 Forbidden rather than a 400, see bug 1133435
+ self.post('/OS-TRUST/trusts', body={'trust': {}}, expected_status=403)
+
+ def test_create_unscoped_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id)
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ self.assertValidTrustResponse(r, ref)
+
+ def test_create_trust_no_roles(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id)
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=403)
+
+ def _initialize_test_consume_trust(self, count):
+ # Make sure remaining_uses is decremented as we consume the trust
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ remaining_uses=count,
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ # make sure the trust exists
+ trust = self.assertValidTrustResponse(r, ref)
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=200)
+ # get a token for the trustee
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ token = r.headers.get('X-Subject-Token')
+ # get a trust token, consume one use
+ auth_data = self.build_authentication_request(
+ token=token,
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ return trust
+
+ def test_consume_trust_once(self):
+ trust = self._initialize_test_consume_trust(2)
+ # check decremented value
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=200)
+ trust = r.result.get('trust')
+ self.assertIsNotNone(trust)
+ self.assertEqual(trust['remaining_uses'], 1)
+
+ def test_create_one_time_use_trust(self):
+ trust = self._initialize_test_consume_trust(1)
+        # No uses remain; the trust becomes unavailable
+ self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=404)
+ # this time we can't get a trust token
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_create_trust_with_bad_values_for_remaining_uses(self):
+ # negative values for the remaining_uses parameter are forbidden
+ self._create_trust_with_bad_remaining_use(bad_value=-1)
+ # 0 is a forbidden value as well
+ self._create_trust_with_bad_remaining_use(bad_value=0)
+        # as are non-integer values
+ self._create_trust_with_bad_remaining_use(bad_value="a bad value")
+ self._create_trust_with_bad_remaining_use(bad_value=7.2)
+
+ def _create_trust_with_bad_remaining_use(self, bad_value):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ remaining_uses=bad_value,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ expected_status=400)
+
+ def test_invalid_trust_request_without_impersonation(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+
+ del ref['impersonation']
+
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ expected_status=400)
+
+ def test_invalid_trust_request_without_trustee(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+
+ del ref['trustee_user_id']
+
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ expected_status=400)
+
+ def test_create_unlimited_use_trust(self):
+        # By default, trusts place no limit on the number of tokens that can
+        # be generated from them; this test creates such a trust explicitly.
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ remaining_uses=None,
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=200)
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'])
+ r = self.v3_authenticate_token(auth_data)
+ token = r.headers.get('X-Subject-Token')
+ auth_data = self.build_authentication_request(
+ token=token,
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=200)
+ trust = r.result.get('trust')
+ self.assertIsNone(trust['remaining_uses'])
+
+ def test_trust_crud(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=200)
+ self.assertValidTrustResponse(r, ref)
+
+ # validate roles on the trust
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles' % {
+ 'trust_id': trust['id']},
+ expected_status=200)
+ roles = self.assertValidRoleListResponse(r, self.role)
+ self.assertIn(self.role['id'], [x['id'] for x in roles])
+ self.head(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ expected_status=200)
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ expected_status=200)
+ self.assertValidRoleResponse(r, self.role)
+
+ r = self.get('/OS-TRUST/trusts', expected_status=200)
+ self.assertValidTrustListResponse(r, trust)
+
+        # trusts are immutable, so updating one is expected to fail (404)
+ self.patch(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ body={'trust': ref},
+ expected_status=404)
+
+ self.delete(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=204)
+
+ self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=404)
+
+ def test_create_trust_trustee_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=uuid.uuid4().hex,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_trust_trustor_trustee_backwards(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.trustee_user_id,
+ trustee_user_id=self.user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=403)
+
+ def test_create_trust_project_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=uuid.uuid4().hex,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_trust_role_id_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[uuid.uuid4().hex])
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_trust_role_name_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_names=[uuid.uuid4().hex])
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_expired_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ expires=dict(seconds=-1),
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=404)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.default_domain_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, self.default_domain_user)
+
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=401)
+
+    def test_v3_v2_intermix_trustee_not_in_default_domain_failed(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.default_domain_project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ token = self.get_requested_token(auth_data)
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, self.trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=401)
+
+    def test_v3_v2_intermix_project_not_in_default_domain_failed(self):
+ # create a trustee in default domain to delegate stuff to
+ trustee_user = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ password = trustee_user['password']
+ trustee_user = self.identity_api.create_user(trustee_user)
+ trustee_user['password'] = password
+ trustee_user_id = trustee_user['id']
+
+ ref = self.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ token = self.get_requested_token(auth_data)
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=401)
+
+ def test_v3_v2_intermix(self):
+ # create a trustee in default domain to delegate stuff to
+ trustee_user = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ password = trustee_user['password']
+ trustee_user = self.identity_api.create_user(trustee_user)
+ trustee_user['password'] = password
+ trustee_user_id = trustee_user['id']
+
+ ref = self.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=trustee_user_id,
+ project_id=self.default_domain_project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ token = self.get_requested_token(auth_data)
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=200)
+
+ def test_exercise_trust_scoped_token_without_impersonation(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(r, self.trustee_user)
+ self.assertEqual(r.result['token']['user']['id'],
+ self.trustee_user['id'])
+ self.assertEqual(r.result['token']['user']['name'],
+ self.trustee_user['name'])
+ self.assertEqual(r.result['token']['user']['domain']['id'],
+ self.domain['id'])
+ self.assertEqual(r.result['token']['user']['domain']['name'],
+ self.domain['name'])
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+ self.assertEqual(r.result['token']['project']['name'],
+ self.project['name'])
+
+ def test_exercise_trust_scoped_token_with_impersonation(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ self.assertEqual(r.result['token']['user']['id'], self.user['id'])
+ self.assertEqual(r.result['token']['user']['name'], self.user['name'])
+ self.assertEqual(r.result['token']['user']['domain']['id'],
+ self.domain['id'])
+ self.assertEqual(r.result['token']['user']['domain']['name'],
+ self.domain['name'])
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+ self.assertEqual(r.result['token']['project']['name'],
+ self.project['name'])
+
+ def test_impersonation_token_cannot_create_new_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+
+ trust_token = self.get_requested_token(auth_data)
+
+ # Build second trust
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ token=trust_token,
+ expected_status=403)
+
+ def test_trust_deleted_grant(self):
+ # create a new role
+ role = self.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
+ grant_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/'
+ 'roles/%(role_id)s' % {
+ 'project_id': self.project_id,
+ 'user_id': self.user_id,
+ 'role_id': role['id']})
+
+ # assign a new role
+ self.put(grant_url)
+
+ # create a trust that delegates the new role
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[role['id']])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ # delete the grant
+ self.delete(grant_url)
+
+        # attempt to get a trust token with the deleted grant
+        # and ensure it is forbidden
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+        self.v3_authenticate_token(auth_data, expected_status=403)
+
+ def test_trust_chained(self):
+ """Test that a trust token can't be used to execute another trust.
+
+ To do this, we create an A->B->C hierarchy of trusts, then attempt to
+ execute the trusts in series (C->B->A).
+
+ """
+ # create a sub-trustee user
+ sub_trustee_user = self.new_user_ref(
+ domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ password = sub_trustee_user['password']
+ sub_trustee_user = self.identity_api.create_user(sub_trustee_user)
+ sub_trustee_user['password'] = password
+ sub_trustee_user_id = sub_trustee_user['id']
+
+ # create a new role
+ role = self.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
+ # assign the new role to trustee
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'project_id': self.project_id,
+ 'user_id': self.trustee_user_id,
+ 'role_id': role['id']})
+
+ # create a trust from trustor -> trustee
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust1 = self.assertValidTrustResponse(r)
+
+ # authenticate as trustee so we can create a second trust
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user_id,
+ password=self.trustee_user['password'],
+ project_id=self.project_id)
+ token = self.get_requested_token(auth_data)
+
+ # create a trust from trustee -> sub-trustee
+ ref = self.new_trust_ref(
+ trustor_user_id=self.trustee_user_id,
+ trustee_user_id=sub_trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[role['id']])
+ r = self.post('/OS-TRUST/trusts', token=token, body={'trust': ref})
+ trust2 = self.assertValidTrustResponse(r)
+
+ # authenticate as sub-trustee and get a trust token
+ auth_data = self.build_authentication_request(
+ user_id=sub_trustee_user['id'],
+ password=sub_trustee_user['password'],
+ trust_id=trust2['id'])
+ trust_token = self.get_requested_token(auth_data)
+
+ # attempt to get the second trust using a trust token
+ auth_data = self.build_authentication_request(
+ token=trust_token,
+ trust_id=trust1['id'])
+        self.v3_authenticate_token(auth_data, expected_status=403)
+
+ def assertTrustTokensRevoked(self, trust_id):
+ revocation_response = self.get('/OS-REVOKE/events',
+ expected_status=200)
+ revocation_events = revocation_response.json_body['events']
+ found = False
+ for event in revocation_events:
+ if event.get('OS-TRUST:trust_id') == trust_id:
+ found = True
+ self.assertTrue(found, 'event with trust_id %s not found in list' %
+ trust_id)
+
+ def test_delete_trust_revokes_tokens(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+ trust_id = trust['id']
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust_id)
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, self.trustee_user)
+ trust_token = r.headers['X-Subject-Token']
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust_id},
+ expected_status=204)
+ headers = {'X-Subject-Token': trust_token}
+ self.head('/auth/tokens', headers=headers, expected_status=404)
+ self.assertTrustTokensRevoked(trust_id)
+
+ def disable_user(self, user):
+ user['enabled'] = False
+ self.identity_api.update_user(user['id'], user)
+
+ def test_trust_get_token_fails_if_trustor_disabled(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+
+ trust = self.assertValidTrustResponse(r, ref)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=201)
+
+ self.disable_user(self.user)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=403)
+
+ def test_trust_get_token_fails_if_trustee_disabled(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+
+ trust = self.assertValidTrustResponse(r, ref)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=201)
+
+ self.disable_user(self.trustee_user)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_delete_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+
+ trust = self.assertValidTrustResponse(r, ref)
+
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=204)
+
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=404)
+
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=404)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.v3_authenticate_token(auth_data, expected_status=401)
+
+ def test_list_trusts(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ for i in range(3):
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ self.assertValidTrustResponse(r, ref)
+
+ r = self.get('/OS-TRUST/trusts', expected_status=200)
+ trusts = r.result['trusts']
+ self.assertEqual(3, len(trusts))
+ self.assertValidTrustListResponse(r)
+
+ r = self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id, expected_status=200)
+ trusts = r.result['trusts']
+ self.assertEqual(3, len(trusts))
+ self.assertValidTrustListResponse(r)
+
+ r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
+ self.user_id, expected_status=200)
+ trusts = r.result['trusts']
+ self.assertEqual(0, len(trusts))
+
+ def test_change_password_invalidates_trust_tokens(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+
+ self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ trust_token = r.headers.get('X-Subject-Token')
+
+ self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id, expected_status=200,
+ token=trust_token)
+
+ self.assertValidUserResponse(
+ self.patch('/users/%s' % self.trustee_user['id'],
+ body={'user': {'password': uuid.uuid4().hex}},
+ expected_status=200))
+
+ self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id, expected_status=401,
+ token=trust_token)
+
+ def test_trustee_can_do_role_ops(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ role_ids=[self.role_id])
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'])
+
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles' % {
+ 'trust_id': trust['id']},
+ auth=auth_data)
+ self.assertValidRoleListResponse(r, self.role)
+
+ self.head(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ auth=auth_data,
+ expected_status=200)
+
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ auth=auth_data,
+ expected_status=200)
+ self.assertValidRoleResponse(r, self.role)
+
+ def test_do_not_consume_remaining_uses_when_get_token_fails(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id],
+ remaining_uses=3)
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+
+ new_trust = r.result.get('trust')
+ trust_id = new_trust.get('id')
+        # Authenticate as a user other than the trustee; the token request
+        # fails and the trust's remaining_uses must not be decremented.
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ trust_id=trust_id)
+ self.v3_authenticate_token(auth_data, expected_status=403)
+
+ r = self.get('/OS-TRUST/trusts/%s' % trust_id)
+ self.assertEqual(3, r.result.get('trust').get('remaining_uses'))
+
+
+class TestAPIProtectionWithoutAuthContextMiddleware(test_v3.RestfulTestCase):
+ def test_api_protection_with_no_auth_context_in_env(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.project['id'])
+ token = self.get_requested_token(auth_data)
+ auth_controller = auth.controllers.Auth()
+        # all we care about is that the auth context is not in the
+        # environment and that 'token_id' is used to build it instead
+ context = {'subject_token_id': token,
+ 'token_id': token,
+ 'query_string': {},
+ 'environment': {}}
+ r = auth_controller.validate_token(context)
+ self.assertEqual(200, r.status_code)
+
+
+class TestAuthContext(tests.TestCase):
+ def setUp(self):
+ super(TestAuthContext, self).setUp()
+ self.auth_context = auth.controllers.AuthContext()
+
+ def test_pick_lowest_expires_at(self):
+ expires_at_1 = timeutils.isotime(timeutils.utcnow())
+ expires_at_2 = timeutils.isotime(timeutils.utcnow() +
+ datetime.timedelta(seconds=10))
+ # make sure auth_context picks the lowest value
+ self.auth_context['expires_at'] = expires_at_1
+ self.auth_context['expires_at'] = expires_at_2
+ self.assertEqual(expires_at_1, self.auth_context['expires_at'])
+
+ def test_identity_attribute_conflict(self):
+ for identity_attr in auth.controllers.AuthContext.IDENTITY_ATTRIBUTES:
+ self.auth_context[identity_attr] = uuid.uuid4().hex
+ if identity_attr == 'expires_at':
+                # 'expires_at' is a special case; it is covered by a
+                # separate test case.
+ continue
+ self.assertRaises(exception.Unauthorized,
+ operator.setitem,
+ self.auth_context,
+ identity_attr,
+ uuid.uuid4().hex)
+
+ def test_identity_attribute_conflict_with_none_value(self):
+ for identity_attr in auth.controllers.AuthContext.IDENTITY_ATTRIBUTES:
+ self.auth_context[identity_attr] = None
+
+ if identity_attr == 'expires_at':
+ # 'expires_at' is a special case and is tested above.
+ self.auth_context['expires_at'] = uuid.uuid4().hex
+ continue
+
+ self.assertRaises(exception.Unauthorized,
+ operator.setitem,
+ self.auth_context,
+ identity_attr,
+ uuid.uuid4().hex)
+
+ def test_non_identity_attribute_conflict_override(self):
+ # for attributes Keystone doesn't know about, make sure they can be
+ # freely manipulated
+ attr_name = uuid.uuid4().hex
+ attr_val_1 = uuid.uuid4().hex
+ attr_val_2 = uuid.uuid4().hex
+ self.auth_context[attr_name] = attr_val_1
+ self.auth_context[attr_name] = attr_val_2
+ self.assertEqual(attr_val_2, self.auth_context[attr_name])
+
+
+class TestAuthSpecificData(test_v3.RestfulTestCase):
+
+ def test_get_catalog_project_scoped_token(self):
+ """Call ``GET /auth/catalog`` with a project-scoped token."""
+ r = self.get(
+ '/auth/catalog',
+ expected_status=200)
+ self.assertValidCatalogResponse(r)
+
+ def test_get_catalog_domain_scoped_token(self):
+ """Call ``GET /auth/catalog`` with a domain-scoped token."""
+ # grant a domain role to a user
+ self.put(path='/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id']))
+
+ self.get(
+ '/auth/catalog',
+ auth=self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id']),
+ expected_status=403)
+
+ def test_get_catalog_unscoped_token(self):
+ """Call ``GET /auth/catalog`` with an unscoped token."""
+ self.get(
+ '/auth/catalog',
+ auth=self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password']),
+ expected_status=403)
+
+ def test_get_catalog_no_token(self):
+ """Call ``GET /auth/catalog`` without a token."""
+ self.get(
+ '/auth/catalog',
+ noauth=True,
+ expected_status=401)
+
+ def test_get_projects_project_scoped_token(self):
+ r = self.get('/auth/projects', expected_status=200)
+ self.assertThat(r.json['projects'], matchers.HasLength(1))
+ self.assertValidProjectListResponse(r)
+
+ def test_get_domains_project_scoped_token(self):
+ self.put(path='/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id']))
+
+ r = self.get('/auth/domains', expected_status=200)
+ self.assertThat(r.json['domains'], matchers.HasLength(1))
+ self.assertValidDomainListResponse(r)
+
+
+class TestFernetTokenProvider(test_v3.RestfulTestCase):
+ def setUp(self):
+ super(TestFernetTokenProvider, self).setUp()
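+        # Fernet tokens are encrypted, signed payloads rather than
+        # persisted records, so the provider needs a key repository on
+        # disk; the fixture below creates a temporary one.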
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
+ def _make_auth_request(self, auth_data):
+ resp = self.post('/auth/tokens', body=auth_data, expected_status=201)
+ token = resp.headers.get('X-Subject-Token')
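+        # Fernet tokens are expected to stay under 255 characters so they
+        # fit within common HTTP header and database column limits.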
+ self.assertLess(len(token), 255)
+ return token
+
+ def _get_unscoped_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ return self._make_auth_request(auth_data)
+
+ def _get_project_scoped_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project_id)
+ return self._make_auth_request(auth_data)
+
+ def _get_domain_scoped_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain_id)
+ return self._make_auth_request(auth_data)
+
+ def _get_trust_scoped_token(self, trustee_user, trust):
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ return self._make_auth_request(auth_data)
+
+ def _validate_token(self, token, expected_status=200):
+ return self.get(
+ '/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=expected_status)
+
+ def _revoke_token(self, token, expected_status=204):
+ return self.delete(
+ '/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=expected_status)
+
+ def _set_user_enabled(self, user, enabled=True):
+ user['enabled'] = enabled
+ self.identity_api.update_user(user['id'], user)
+
+ def _create_trust(self):
+ # Create a trustee user
+ trustee_user_ref = self.new_user_ref(domain_id=self.domain_id)
+ trustee_user = self.identity_api.create_user(trustee_user_ref)
+ trustee_user['password'] = trustee_user_ref['password']
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ role_ids=[self.role_id])
+
+ # Create a trust
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+ return (trustee_user, trust)
+
+ def config_overrides(self):
+ super(TestFernetTokenProvider, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.fernet.Provider')
+
+ def test_validate_unscoped_token(self):
+ unscoped_token = self._get_unscoped_token()
+ self._validate_token(unscoped_token)
+
+ def test_validate_tampered_unscoped_token_fails(self):
+ unscoped_token = self._get_unscoped_token()
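+        # Replace the 32 characters at offset 50 with random hex; the
+        # provider's integrity check should then reject the token.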
+ tampered_token = (unscoped_token[:50] + uuid.uuid4().hex +
+ unscoped_token[50 + 32:])
+ self._validate_token(tampered_token, expected_status=401)
+
+ def test_revoke_unscoped_token(self):
+ unscoped_token = self._get_unscoped_token()
+ self._validate_token(unscoped_token)
+ self._revoke_token(unscoped_token)
+ self._validate_token(unscoped_token, expected_status=404)
+
+ def test_unscoped_token_is_invalid_after_disabling_user(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Disable the user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_unscoped_token_is_invalid_after_enabling_disabled_user(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Disable the user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+ # Enable the user
+ self._set_user_enabled(self.user)
+        # Ensure the token remains invalid even after the user is re-enabled
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_unscoped_token_is_invalid_after_disabling_user_domain(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Disable the user's domain
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+        # Ensure validating a token for a user in a disabled domain fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_unscoped_token_is_invalid_after_changing_user_password(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Change user's password
+ self.user['password'] = 'Password1'
+ self.identity_api.update_user(self.user['id'], self.user)
+ # Ensure updating user's password revokes existing user's tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_validate_project_scoped_token(self):
+ project_scoped_token = self._get_project_scoped_token()
+ self._validate_token(project_scoped_token)
+
+ def test_validate_domain_scoped_token(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ resp = self._validate_token(domain_scoped_token)
+ resp_json = json.loads(resp.body)
+ self.assertIsNotNone(resp_json['token']['catalog'])
+ self.assertIsNotNone(resp_json['token']['roles'])
+ self.assertIsNotNone(resp_json['token']['domain'])
+
+ def test_validate_tampered_project_scoped_token_fails(self):
+ project_scoped_token = self._get_project_scoped_token()
+ tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex +
+ project_scoped_token[50 + 32:])
+ self._validate_token(tampered_token, expected_status=401)
+
+ def test_revoke_project_scoped_token(self):
+ project_scoped_token = self._get_project_scoped_token()
+ self._validate_token(project_scoped_token)
+ self._revoke_token(project_scoped_token)
+ self._validate_token(project_scoped_token, expected_status=404)
+
+ def test_project_scoped_token_is_invalid_after_disabling_user(self):
+ project_scoped_token = self._get_project_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(project_scoped_token)
+ # Disable the user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ project_scoped_token)
+
+ def test_domain_scoped_token_is_invalid_after_disabling_user(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(domain_scoped_token)
+ # Disable user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ domain_scoped_token)
+
+ def test_domain_scoped_token_is_invalid_after_deleting_grant(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(domain_scoped_token)
+ # Delete access to domain
+ self.assignment_api.delete_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+        # Ensure validating a token after the grant is deleted fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ domain_scoped_token)
+
+ def test_project_scoped_token_invalid_after_changing_user_password(self):
+ project_scoped_token = self._get_project_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(project_scoped_token)
+ # Update user's password
+ self.user['password'] = 'Password1'
+ self.identity_api.update_user(self.user['id'], self.user)
+        # Ensure updating the user's password revokes existing tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ project_scoped_token)
+
+ def test_project_scoped_token_invalid_after_disabling_project(self):
+ project_scoped_token = self._get_project_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(project_scoped_token)
+ # Disable project
+ self.project['enabled'] = False
+ self.resource_api.update_project(self.project['id'], self.project)
+ # Ensure validating a token for a disabled project fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ project_scoped_token)
+
+ def test_domain_scoped_token_invalid_after_disabling_domain(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(domain_scoped_token)
+ # Disable domain
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+ # Ensure validating a token for a disabled domain fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ domain_scoped_token)
+
+ def test_rescope_unscoped_token_with_trust(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
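+        # Fernet tokens are meant to stay under 255 characters so they
+        # fit in typical 255-character database columns and HTTP headers.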
+ self.assertLess(len(trust_scoped_token), 255)
+
+ def test_validate_a_trust_scoped_token(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ def test_validate_tampered_trust_scoped_token_fails(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Get a trust scoped token
+ tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex +
+ trust_scoped_token[50 + 32:])
+ self._validate_token(tampered_token, expected_status=401)
+
+ def test_revoke_trust_scoped_token(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+ self._revoke_token(trust_scoped_token)
+ self._validate_token(trust_scoped_token, expected_status=404)
+
+ def test_trust_scoped_token_is_invalid_after_disabling_trustee(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Disable trustee
+ trustee_update_ref = dict(enabled=False)
+ self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_invalid_after_changing_trustee_password(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+ # Change trustee's password
+ trustee_update_ref = dict(password='Password1')
+ self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
+ # Ensure updating trustee's password revokes existing tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_is_invalid_after_disabling_trustor(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Disable the trustor
+ trustor_update_ref = dict(enabled=False)
+ self.identity_api.update_user(self.user['id'], trustor_update_ref)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_invalid_after_changing_trustor_password(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Change trustor's password
+ trustor_update_ref = dict(password='Password1')
+ self.identity_api.update_user(self.user['id'], trustor_update_ref)
+        # Ensure updating the trustor's password revokes existing tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_invalid_after_disabled_trustor_domain(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Disable trustor's domain
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+
+ trustor_update_ref = dict(password='Password1')
+ self.identity_api.update_user(self.user['id'], trustor_update_ref)
+        # Ensure the token is no longer valid after these changes
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_v2_validate_unscoped_token_returns_401(self):
+ """Test raised exception when validating unscoped token.
+
+ Test that validating an unscoped token in v2.0 of a v3 user of a
+ non-default domain returns unauthorized.
+ """
+ unscoped_token = self._get_unscoped_token()
+ self.assertRaises(exception.Unauthorized,
+ self.token_provider_api.validate_v2_token,
+ unscoped_token)
+
+ def test_v2_validate_domain_scoped_token_returns_401(self):
+ """Test raised exception when validating a domain scoped token.
+
+        Test that validating a domain scoped token in v2.0
+ returns unauthorized.
+ """
+
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+
+ scoped_token = self._get_domain_scoped_token()
+ self.assertRaises(exception.Unauthorized,
+ self.token_provider_api.validate_v2_token,
+ scoped_token)
+
+ def test_v2_validate_trust_scoped_token(self):
+ """Test raised exception when validating a trust scoped token.
+
+        Test that validating a trust scoped token in v2.0 returns
+ unauthorized.
+ """
+
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ self.assertRaises(exception.Unauthorized,
+ self.token_provider_api.validate_v2_token,
+ trust_scoped_token)
+
+
+class TestAuthFernetTokenProvider(TestAuth):
+ def setUp(self):
+ super(TestAuthFernetTokenProvider, self).setUp()
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
+ def config_overrides(self):
+ super(TestAuthFernetTokenProvider, self).config_overrides()
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.fernet.Provider')
+
+ def test_verify_with_bound_token(self):
+ self.config_fixture.config(group='token', bind='kerberos')
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'])
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+        # Bind is not currently supported by Fernet; see bug 1433311.
+ self.v3_authenticate_token(auth_data, expected_status=501)
+
+ def test_v2_v3_bind_token_intermix(self):
+ self.config_fixture.config(group='token', bind='kerberos')
+
+ # we need our own user registered to the default domain because of
+ # the way external auth works.
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ body = {'auth': {}}
+        # Bind is not currently supported by Fernet; see bug 1433311.
+ self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body,
+ expected_status=501)
+
+ def test_auth_with_bind_token(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+
+ auth_data = self.build_authentication_request()
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+        # Bind is not currently supported by Fernet; see bug 1433311.
+ self.v3_authenticate_token(auth_data, expected_status=501)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_catalog.py b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
new file mode 100644
index 00000000..d231b2e1
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
@@ -0,0 +1,746 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from keystone import catalog
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit import test_v3
+
+
+class CatalogTestCase(test_v3.RestfulTestCase):
+ """Test service & endpoint CRUD."""
+
+ # region crud tests
+
+ def test_create_region_with_id(self):
+ """Call ``PUT /regions/{region_id}`` w/o an ID in the request body."""
+ ref = self.new_region_ref()
+ region_id = ref.pop('id')
+ r = self.put(
+ '/regions/%s' % region_id,
+ body={'region': ref},
+ expected_status=201)
+ self.assertValidRegionResponse(r, ref)
+ # Double-check that the region ID was kept as-is and not
+ # populated with a UUID, as is the case with POST /v3/regions
+ self.assertEqual(region_id, r.json['region']['id'])
+
+ def test_create_region_with_matching_ids(self):
+ """Call ``PUT /regions/{region_id}`` with an ID in the request body."""
+ ref = self.new_region_ref()
+ region_id = ref['id']
+ r = self.put(
+ '/regions/%s' % region_id,
+ body={'region': ref},
+ expected_status=201)
+ self.assertValidRegionResponse(r, ref)
+ # Double-check that the region ID was kept as-is and not
+ # populated with a UUID, as is the case with POST /v3/regions
+ self.assertEqual(region_id, r.json['region']['id'])
+
+ def test_create_region_with_duplicate_id(self):
+ """Call ``PUT /regions/{region_id}``."""
+ ref = dict(description="my region")
+ self.put(
+ '/regions/myregion',
+ body={'region': ref}, expected_status=201)
+ # Create region again with duplicate id
+ self.put(
+ '/regions/myregion',
+ body={'region': ref}, expected_status=409)
+
+ def test_create_region(self):
+ """Call ``POST /regions`` with an ID in the request body."""
+ # the ref will have an ID defined on it
+ ref = self.new_region_ref()
+ r = self.post(
+ '/regions',
+ body={'region': ref})
+ self.assertValidRegionResponse(r, ref)
+
+ # we should be able to get the region, having defined the ID ourselves
+ r = self.get(
+ '/regions/%(region_id)s' % {
+ 'region_id': ref['id']})
+ self.assertValidRegionResponse(r, ref)
+
+ def test_create_region_with_empty_id(self):
+ """Call ``POST /regions`` with an empty ID in the request body."""
+ ref = self.new_region_ref()
+ ref['id'] = ''
+
+ r = self.post(
+ '/regions',
+ body={'region': ref}, expected_status=201)
+ self.assertValidRegionResponse(r, ref)
+ self.assertNotEmpty(r.result['region'].get('id'))
+
+ def test_create_region_without_id(self):
+ """Call ``POST /regions`` without an ID in the request body."""
+ ref = self.new_region_ref()
+
+ # instead of defining the ID ourselves...
+ del ref['id']
+
+ # let the service define the ID
+ r = self.post(
+ '/regions',
+ body={'region': ref},
+ expected_status=201)
+ self.assertValidRegionResponse(r, ref)
+
+ def test_create_region_without_description(self):
+ """Call ``POST /regions`` without description in the request body."""
+ ref = self.new_region_ref()
+
+ del ref['description']
+
+ r = self.post(
+ '/regions',
+ body={'region': ref},
+ expected_status=201)
+ # Create the description in the reference to compare to since the
+ # response should now have a description, even though we didn't send
+ # it with the original reference.
+ ref['description'] = ''
+ self.assertValidRegionResponse(r, ref)
+
+ def test_create_regions_with_same_description_string(self):
+ """Call ``POST /regions`` with same description in the request bodies.
+ """
+ # NOTE(lbragstad): Make sure we can create two regions that have the
+ # same description.
+ ref1 = self.new_region_ref()
+ ref2 = self.new_region_ref()
+
+ region_desc = 'Some Region Description'
+
+ ref1['description'] = region_desc
+ ref2['description'] = region_desc
+
+ resp1 = self.post(
+ '/regions',
+ body={'region': ref1},
+ expected_status=201)
+ self.assertValidRegionResponse(resp1, ref1)
+
+ resp2 = self.post(
+ '/regions',
+ body={'region': ref2},
+ expected_status=201)
+ self.assertValidRegionResponse(resp2, ref2)
+
+ def test_create_regions_without_descriptions(self):
+ """Call ``POST /regions`` with no description in the request bodies.
+ """
+ # NOTE(lbragstad): Make sure we can create two regions that have
+ # no description in the request body. The description should be
+ # populated by Catalog Manager.
+ ref1 = self.new_region_ref()
+ ref2 = self.new_region_ref()
+
+ del ref1['description']
+ del ref2['description']
+
+ resp1 = self.post(
+ '/regions',
+ body={'region': ref1},
+ expected_status=201)
+
+ resp2 = self.post(
+ '/regions',
+ body={'region': ref2},
+ expected_status=201)
+ # Create the descriptions in the references to compare to since the
+ # responses should now have descriptions, even though we didn't send
+ # a description with the original references.
+ ref1['description'] = ''
+ ref2['description'] = ''
+ self.assertValidRegionResponse(resp1, ref1)
+ self.assertValidRegionResponse(resp2, ref2)
+
+ def test_create_region_with_conflicting_ids(self):
+ """Call ``PUT /regions/{region_id}`` with conflicting region IDs."""
+ # the region ref is created with an ID
+ ref = self.new_region_ref()
+
+ # but instead of using that ID, make up a new, conflicting one
+ self.put(
+ '/regions/%s' % uuid.uuid4().hex,
+ body={'region': ref},
+ expected_status=400)
+
+ def test_list_regions(self):
+ """Call ``GET /regions``."""
+ r = self.get('/regions')
+ self.assertValidRegionListResponse(r, ref=self.region)
+
+ def _create_region_with_parent_id(self, parent_id=None):
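+        # With the default parent_id=None this creates a top-level region;
+        # pass an existing region id to create a child region under it.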
+ ref = self.new_region_ref()
+ ref['parent_region_id'] = parent_id
+ return self.post(
+ '/regions',
+ body={'region': ref})
+
+ def test_list_regions_filtered_by_parent_region_id(self):
+ """Call ``GET /regions?parent_region_id={parent_region_id}``."""
+ new_region = self._create_region_with_parent_id()
+ parent_id = new_region.result['region']['id']
+
+ new_region = self._create_region_with_parent_id(parent_id)
+ new_region = self._create_region_with_parent_id(parent_id)
+
+ r = self.get('/regions?parent_region_id=%s' % parent_id)
+
+ for region in r.result['regions']:
+ self.assertEqual(parent_id, region['parent_region_id'])
+
+ def test_get_region(self):
+ """Call ``GET /regions/{region_id}``."""
+ r = self.get('/regions/%(region_id)s' % {
+ 'region_id': self.region_id})
+ self.assertValidRegionResponse(r, self.region)
+
+ def test_update_region(self):
+ """Call ``PATCH /regions/{region_id}``."""
+ region = self.new_region_ref()
+ del region['id']
+ r = self.patch('/regions/%(region_id)s' % {
+ 'region_id': self.region_id},
+ body={'region': region})
+ self.assertValidRegionResponse(r, region)
+
+ def test_delete_region(self):
+ """Call ``DELETE /regions/{region_id}``."""
+
+ ref = self.new_region_ref()
+ r = self.post(
+ '/regions',
+ body={'region': ref})
+ self.assertValidRegionResponse(r, ref)
+
+ self.delete('/regions/%(region_id)s' % {
+ 'region_id': ref['id']})
+
+ # service crud tests
+
+ def test_create_service(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ r = self.post(
+ '/services',
+ body={'service': ref})
+ self.assertValidServiceResponse(r, ref)
+
+ def test_create_service_no_name(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ del ref['name']
+ r = self.post(
+ '/services',
+ body={'service': ref})
+ ref['name'] = ''
+ self.assertValidServiceResponse(r, ref)
+
+ def test_create_service_no_enabled(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ del ref['enabled']
+ r = self.post(
+ '/services',
+ body={'service': ref})
+ ref['enabled'] = True
+ self.assertValidServiceResponse(r, ref)
+ self.assertIs(True, r.result['service']['enabled'])
+
+ def test_create_service_enabled_false(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ ref['enabled'] = False
+ r = self.post(
+ '/services',
+ body={'service': ref})
+ self.assertValidServiceResponse(r, ref)
+ self.assertIs(False, r.result['service']['enabled'])
+
+ def test_create_service_enabled_true(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ ref['enabled'] = True
+ r = self.post(
+ '/services',
+ body={'service': ref})
+ self.assertValidServiceResponse(r, ref)
+ self.assertIs(True, r.result['service']['enabled'])
+
+ def test_create_service_enabled_str_true(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ ref['enabled'] = 'True'
+ self.post('/services', body={'service': ref}, expected_status=400)
+
+ def test_create_service_enabled_str_false(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ ref['enabled'] = 'False'
+ self.post('/services', body={'service': ref}, expected_status=400)
+
+ def test_create_service_enabled_str_random(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ ref['enabled'] = 'puppies'
+ self.post('/services', body={'service': ref}, expected_status=400)
+
+ def test_list_services(self):
+ """Call ``GET /services``."""
+ r = self.get('/services')
+ self.assertValidServiceListResponse(r, ref=self.service)
+
+ def _create_random_service(self):
+ ref = self.new_service_ref()
+ ref['enabled'] = True
+ response = self.post(
+ '/services',
+ body={'service': ref})
+ return response.json['service']
+
+ def test_filter_list_services_by_type(self):
+ """Call ``GET /services?type=<some type>``."""
+ target_ref = self._create_random_service()
+
+ # create unrelated services
+ self._create_random_service()
+ self._create_random_service()
+
+ response = self.get('/services?type=' + target_ref['type'])
+ self.assertValidServiceListResponse(response, ref=target_ref)
+
+ filtered_service_list = response.json['services']
+ self.assertEqual(1, len(filtered_service_list))
+
+ filtered_service = filtered_service_list[0]
+ self.assertEqual(target_ref['type'], filtered_service['type'])
+
+ def test_filter_list_services_by_name(self):
+ """Call ``GET /services?name=<some name>``."""
+ target_ref = self._create_random_service()
+
+ # create unrelated services
+ self._create_random_service()
+ self._create_random_service()
+
+ response = self.get('/services?name=' + target_ref['name'])
+ self.assertValidServiceListResponse(response, ref=target_ref)
+
+ filtered_service_list = response.json['services']
+ self.assertEqual(1, len(filtered_service_list))
+
+ filtered_service = filtered_service_list[0]
+ self.assertEqual(target_ref['name'], filtered_service['name'])
+
+ def test_get_service(self):
+ """Call ``GET /services/{service_id}``."""
+ r = self.get('/services/%(service_id)s' % {
+ 'service_id': self.service_id})
+ self.assertValidServiceResponse(r, self.service)
+
+ def test_update_service(self):
+ """Call ``PATCH /services/{service_id}``."""
+ service = self.new_service_ref()
+ del service['id']
+ r = self.patch('/services/%(service_id)s' % {
+ 'service_id': self.service_id},
+ body={'service': service})
+ self.assertValidServiceResponse(r, service)
+
+ def test_delete_service(self):
+ """Call ``DELETE /services/{service_id}``."""
+ self.delete('/services/%(service_id)s' % {
+ 'service_id': self.service_id})
+
+ # endpoint crud tests
+
+ def test_list_endpoints(self):
+ """Call ``GET /endpoints``."""
+ r = self.get('/endpoints')
+ self.assertValidEndpointListResponse(r, ref=self.endpoint)
+
+ def test_create_endpoint_no_enabled(self):
+ """Call ``POST /endpoints``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ r = self.post(
+ '/endpoints',
+ body={'endpoint': ref})
+ ref['enabled'] = True
+ self.assertValidEndpointResponse(r, ref)
+
+ def test_create_endpoint_enabled_true(self):
+ """Call ``POST /endpoints`` with enabled: true."""
+ ref = self.new_endpoint_ref(service_id=self.service_id,
+ enabled=True)
+ r = self.post(
+ '/endpoints',
+ body={'endpoint': ref})
+ self.assertValidEndpointResponse(r, ref)
+
+ def test_create_endpoint_enabled_false(self):
+ """Call ``POST /endpoints`` with enabled: false."""
+ ref = self.new_endpoint_ref(service_id=self.service_id,
+ enabled=False)
+ r = self.post(
+ '/endpoints',
+ body={'endpoint': ref})
+ self.assertValidEndpointResponse(r, ref)
+
+ def test_create_endpoint_enabled_str_true(self):
+ """Call ``POST /endpoints`` with enabled: 'True'."""
+ ref = self.new_endpoint_ref(service_id=self.service_id,
+ enabled='True')
+ self.post(
+ '/endpoints',
+ body={'endpoint': ref},
+ expected_status=400)
+
+ def test_create_endpoint_enabled_str_false(self):
+ """Call ``POST /endpoints`` with enabled: 'False'."""
+ ref = self.new_endpoint_ref(service_id=self.service_id,
+ enabled='False')
+ self.post(
+ '/endpoints',
+ body={'endpoint': ref},
+ expected_status=400)
+
+ def test_create_endpoint_enabled_str_random(self):
+ """Call ``POST /endpoints`` with enabled: 'puppies'."""
+ ref = self.new_endpoint_ref(service_id=self.service_id,
+ enabled='puppies')
+ self.post(
+ '/endpoints',
+ body={'endpoint': ref},
+ expected_status=400)
+
+ def test_create_endpoint_with_invalid_region_id(self):
+ """Call ``POST /endpoints``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ ref["region_id"] = uuid.uuid4().hex
+ self.post('/endpoints', body={'endpoint': ref}, expected_status=400)
+
+ def test_create_endpoint_with_region(self):
+ """EndpointV3 creates the region before creating the endpoint, if
+ endpoint is provided with 'region' and no 'region_id'
+ """
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ ref["region"] = uuid.uuid4().hex
+ ref.pop('region_id')
+ self.post('/endpoints', body={'endpoint': ref}, expected_status=201)
+ # Make sure the region is created
+ self.get('/regions/%(region_id)s' % {
+ 'region_id': ref["region"]})
+
+ def test_create_endpoint_with_no_region(self):
+ """EndpointV3 allows to creates the endpoint without region."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ ref.pop('region_id')
+ self.post('/endpoints', body={'endpoint': ref}, expected_status=201)
+
+ def test_create_endpoint_with_empty_url(self):
+ """Call ``POST /endpoints``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ ref["url"] = ''
+ self.post('/endpoints', body={'endpoint': ref}, expected_status=400)
+
+ def test_get_endpoint(self):
+ """Call ``GET /endpoints/{endpoint_id}``."""
+ r = self.get(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+ self.assertValidEndpointResponse(r, self.endpoint)
+
+ def test_update_endpoint(self):
+ """Call ``PATCH /endpoints/{endpoint_id}``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ del ref['id']
+ r = self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': ref})
+ ref['enabled'] = True
+ self.assertValidEndpointResponse(r, ref)
+
+ def test_update_endpoint_enabled_true(self):
+ """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True."""
+ r = self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': {'enabled': True}})
+ self.assertValidEndpointResponse(r, self.endpoint)
+
+ def test_update_endpoint_enabled_false(self):
+ """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False."""
+ r = self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': {'enabled': False}})
+ exp_endpoint = copy.copy(self.endpoint)
+ exp_endpoint['enabled'] = False
+ self.assertValidEndpointResponse(r, exp_endpoint)
+
+ def test_update_endpoint_enabled_str_true(self):
+ """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'."""
+ self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': {'enabled': 'True'}},
+ expected_status=400)
+
+ def test_update_endpoint_enabled_str_false(self):
+ """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'."""
+ self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': {'enabled': 'False'}},
+ expected_status=400)
+
+ def test_update_endpoint_enabled_str_random(self):
+ """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'."""
+ self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': {'enabled': 'kitties'}},
+ expected_status=400)
+
+ def test_delete_endpoint(self):
+ """Call ``DELETE /endpoints/{endpoint_id}``."""
+ self.delete(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+
+ def test_create_endpoint_on_v2(self):
+ # clear the v3 endpoint so we only have endpoints created on v2
+ self.delete(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+
+ # create a v3 endpoint ref, and then tweak it back to a v2-style ref
+ ref = self.new_endpoint_ref(service_id=self.service['id'])
+ del ref['id']
+ del ref['interface']
+ ref['publicurl'] = ref.pop('url')
+ ref['internalurl'] = None
+ ref['region'] = ref['region_id']
+ del ref['region_id']
+        # don't set adminurl to ensure its absence is handled like internalurl
+
+ # create the endpoint on v2 (using a v3 token)
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/endpoints',
+ token=self.get_scoped_token(),
+ body={'endpoint': ref})
+ endpoint_v2 = r.result['endpoint']
+
+ # test the endpoint on v3
+ r = self.get('/endpoints')
+ endpoints = self.assertValidEndpointListResponse(r)
+ self.assertEqual(1, len(endpoints))
+ endpoint_v3 = endpoints.pop()
+
+ # these attributes are identical between both APIs
+ self.assertEqual(ref['region'], endpoint_v3['region_id'])
+ self.assertEqual(ref['service_id'], endpoint_v3['service_id'])
+ self.assertEqual(ref['description'], endpoint_v3['description'])
+
+ # a v2 endpoint is not quite the same concept as a v3 endpoint, so they
+ # receive different identifiers
+ self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id'])
+
+ # v2 has a publicurl; v3 has a url + interface type
+ self.assertEqual(ref['publicurl'], endpoint_v3['url'])
+ self.assertEqual('public', endpoint_v3['interface'])
+
+ # tests for bug 1152632 -- these attributes were being returned by v3
+ self.assertNotIn('publicurl', endpoint_v3)
+ self.assertNotIn('adminurl', endpoint_v3)
+ self.assertNotIn('internalurl', endpoint_v3)
+
+ # test for bug 1152635 -- this attribute was being returned by v3
+ self.assertNotIn('legacy_endpoint_id', endpoint_v3)
+
+ self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
+
+
+class TestCatalogAPISQL(tests.TestCase):
+ """Tests for the catalog Manager against the SQL backend.
+
+ """
+
+ def setUp(self):
+ super(TestCatalogAPISQL, self).setUp()
+ self.useFixture(database.Database())
+ self.catalog_api = catalog.Manager()
+
+ self.service_id = uuid.uuid4().hex
+ service = {'id': self.service_id, 'name': uuid.uuid4().hex}
+ self.catalog_api.create_service(self.service_id, service)
+
+ endpoint = self.new_endpoint_ref(service_id=self.service_id)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ def config_overrides(self):
+ super(TestCatalogAPISQL, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.sql.Catalog')
+
+ def new_endpoint_ref(self, service_id):
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'interface': uuid.uuid4().hex[:8],
+ 'service_id': service_id,
+ 'url': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ }
+
+ def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
+ user_id = uuid.uuid4().hex
+ tenant_id = uuid.uuid4().hex
+
+ # the only endpoint in the catalog is the one created in setUp
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
+ self.assertEqual(1, len(catalog[0]['endpoints']))
+ # it's also the only endpoint in the backend
+ self.assertEqual(1, len(self.catalog_api.list_endpoints()))
+
+ # create a new, invalid endpoint - malformed type declaration
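+        # ('%(tenant_id)' lacks the trailing 's' conversion character, so
+        # substituting values into the URL fails)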
+ ref = self.new_endpoint_ref(self.service_id)
+ ref['url'] = 'http://keystone/%(tenant_id)'
+ self.catalog_api.create_endpoint(ref['id'], ref)
+
+ # create a new, invalid endpoint - nonexistent key
+ ref = self.new_endpoint_ref(self.service_id)
+ ref['url'] = 'http://keystone/%(you_wont_find_me)s'
+ self.catalog_api.create_endpoint(ref['id'], ref)
+
+ # verify that the invalid endpoints don't appear in the catalog
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
+ self.assertEqual(1, len(catalog[0]['endpoints']))
+ # all three appear in the backend
+ self.assertEqual(3, len(self.catalog_api.list_endpoints()))
+
+ def test_get_catalog_always_returns_service_name(self):
+ user_id = uuid.uuid4().hex
+ tenant_id = uuid.uuid4().hex
+
+ # create a service, with a name
+ named_svc = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(named_svc['id'], named_svc)
+ endpoint = self.new_endpoint_ref(service_id=named_svc['id'])
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # create a service, with no name
+ unnamed_svc = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex
+ }
+ self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
+ endpoint = self.new_endpoint_ref(service_id=unnamed_svc['id'])
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
+
+ named_endpoint = [ep for ep in catalog
+ if ep['type'] == named_svc['type']][0]
+ self.assertEqual(named_svc['name'], named_endpoint['name'])
+
+ unnamed_endpoint = [ep for ep in catalog
+ if ep['type'] == unnamed_svc['type']][0]
+ self.assertEqual('', unnamed_endpoint['name'])
+
+
+# TODO(dstanek): this needs refactoring with the test above, but we are in a
+# crunch so that will happen in a future patch.
+class TestCatalogAPISQLRegions(tests.TestCase):
+ """Tests for the catalog Manager against the SQL backend.
+
+ """
+
+ def setUp(self):
+ super(TestCatalogAPISQLRegions, self).setUp()
+ self.useFixture(database.Database())
+ self.catalog_api = catalog.Manager()
+
+ def config_overrides(self):
+ super(TestCatalogAPISQLRegions, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='keystone.catalog.backends.sql.Catalog')
+
+ def new_endpoint_ref(self, service_id):
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'interface': uuid.uuid4().hex[:8],
+ 'service_id': service_id,
+ 'url': uuid.uuid4().hex,
+ 'region_id': uuid.uuid4().hex,
+ }
+
+ def test_get_catalog_returns_proper_endpoints_with_no_region(self):
+ service_id = uuid.uuid4().hex
+ service = {'id': service_id, 'name': uuid.uuid4().hex}
+ self.catalog_api.create_service(service_id, service)
+
+ endpoint = self.new_endpoint_ref(service_id=service_id)
+ del endpoint['region_id']
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ user_id = uuid.uuid4().hex
+ tenant_id = uuid.uuid4().hex
+
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
+ self.assertValidCatalogEndpoint(
+ catalog[0]['endpoints'][0], ref=endpoint)
+
+ def test_get_catalog_returns_proper_endpoints_with_region(self):
+ service_id = uuid.uuid4().hex
+ service = {'id': service_id, 'name': uuid.uuid4().hex}
+ self.catalog_api.create_service(service_id, service)
+
+ endpoint = self.new_endpoint_ref(service_id=service_id)
+ self.catalog_api.create_region({'id': endpoint['region_id']})
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
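+        # Re-read the endpoint so the reference reflects any attributes
+        # the backend normalized or filled in at creation time.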
+ endpoint = self.catalog_api.get_endpoint(endpoint['id'])
+ user_id = uuid.uuid4().hex
+ tenant_id = uuid.uuid4().hex
+
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
+ self.assertValidCatalogEndpoint(
+ catalog[0]['endpoints'][0], ref=endpoint)
+
+ def assertValidCatalogEndpoint(self, entity, ref=None):
+ keys = ['description', 'id', 'interface', 'name', 'region_id', 'url']
+ for k in keys:
+ self.assertEqual(ref.get(k), entity[k], k)
+ self.assertEqual(entity['region_id'], entity['region'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_controller.py b/keystone-moon/keystone/tests/unit/test_v3_controller.py
new file mode 100644
index 00000000..3ac4ba5a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_controller.py
@@ -0,0 +1,52 @@
+# Copyright 2014 CERN.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import six
+from testtools import matchers
+
+from keystone.common import controller
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+class V3ControllerTestCase(tests.TestCase):
+ """Tests for the V3Controller class."""
+ def setUp(self):
+ super(V3ControllerTestCase, self).setUp()
+
+ class ControllerUnderTest(controller.V3Controller):
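+            # Only these parameters may be set through the API; any other
+            # attribute is treated as immutable and rejected.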
+ _mutable_parameters = frozenset(['hello', 'world'])
+
+ self.api = ControllerUnderTest()
+
+ def test_check_immutable_params(self):
+ """Pass valid parameters to the method and expect no failure."""
+ ref = {
+ 'hello': uuid.uuid4().hex,
+ 'world': uuid.uuid4().hex
+ }
+ self.api.check_immutable_params(ref)
+
+ def test_check_immutable_params_fail(self):
+ """Pass invalid parameter to the method and expect failure."""
+ ref = {uuid.uuid4().hex: uuid.uuid4().hex for _ in range(3)}
+
+ ex = self.assertRaises(exception.ImmutableAttributeError,
+ self.api.check_immutable_params, ref)
+ ex_msg = six.text_type(ex)
+ self.assertThat(ex_msg, matchers.Contains(self.api.__class__.__name__))
+ for key in ref.keys():
+ self.assertThat(ex_msg, matchers.Contains(key))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_credential.py b/keystone-moon/keystone/tests/unit/test_v3_credential.py
new file mode 100644
index 00000000..d792b216
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_credential.py
@@ -0,0 +1,406 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import hashlib
+import json
+import uuid
+
+from keystoneclient.contrib.ec2 import utils as ec2_utils
+from oslo_config import cfg
+
+from keystone import exception
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+class CredentialBaseTestCase(test_v3.RestfulTestCase):
+ def _create_dict_blob_credential(self):
+ blob = {"access": uuid.uuid4().hex,
+ "secret": uuid.uuid4().hex}
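+        # ec2 credential ids are deterministic: the id is the SHA-256 hex
+        # digest of the access key id.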
+ credential_id = hashlib.sha256(blob['access']).hexdigest()
+ credential = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ credential['id'] = credential_id
+
+        # Store the blob as a dict, *not* JSON (ref bug #1259584). This
+        # means we can test that the dict->json workaround, added as part
+        # of the bugfix for backwards compatibility, works.
+ credential['blob'] = blob
+ credential['type'] = 'ec2'
+ # Create direct via the DB API to avoid validation failure
+ self.credential_api.create_credential(
+ credential_id,
+ credential)
+ expected_blob = json.dumps(blob)
+ return expected_blob, credential_id
+
+
+class CredentialTestCase(CredentialBaseTestCase):
+ """Test credential CRUD."""
+ def setUp(self):
+
+ super(CredentialTestCase, self).setUp()
+
+ self.credential_id = uuid.uuid4().hex
+ self.credential = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential['id'] = self.credential_id
+ self.credential_api.create_credential(
+ self.credential_id,
+ self.credential)
+
+ def test_credential_api_delete_credentials_for_project(self):
+ self.credential_api.delete_credentials_for_project(self.project_id)
+ # Test that the credential that we created in .setUp no longer exists
+ # once we delete all credentials for self.project_id
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ credential_id=self.credential_id)
+
+ def test_credential_api_delete_credentials_for_user(self):
+ self.credential_api.delete_credentials_for_user(self.user_id)
+ # Test that the credential that we created in .setUp no longer exists
+ # once we delete all credentials for self.user_id
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ credential_id=self.credential_id)
+
+ def test_list_credentials(self):
+ """Call ``GET /credentials``."""
+ r = self.get('/credentials')
+ self.assertValidCredentialListResponse(r, ref=self.credential)
+
+ def test_list_credentials_filtered_by_user_id(self):
+ """Call ``GET /credentials?user_id={user_id}``."""
+ credential = self.new_credential_ref(
+ user_id=uuid.uuid4().hex)
+ self.credential_api.create_credential(
+ credential['id'], credential)
+
+ r = self.get('/credentials?user_id=%s' % self.user['id'])
+ self.assertValidCredentialListResponse(r, ref=self.credential)
+ for cred in r.result['credentials']:
+ self.assertEqual(self.user['id'], cred['user_id'])
+
+ def test_create_credential(self):
+ """Call ``POST /credentials``."""
+ ref = self.new_credential_ref(user_id=self.user['id'])
+ r = self.post(
+ '/credentials',
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+
+ def test_get_credential(self):
+ """Call ``GET /credentials/{credential_id}``."""
+ r = self.get(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': self.credential_id})
+ self.assertValidCredentialResponse(r, self.credential)
+
+ def test_update_credential(self):
+ """Call ``PATCH /credentials/{credential_id}``."""
+ ref = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ del ref['id']
+ r = self.patch(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': self.credential_id},
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+
+ def test_delete_credential(self):
+ """Call ``DELETE /credentials/{credential_id}``."""
+ self.delete(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': self.credential_id})
+
+ def test_create_ec2_credential(self):
+ """Call ``POST /credentials`` for creating ec2 credential."""
+ ref = self.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
+ blob = {"access": uuid.uuid4().hex,
+ "secret": uuid.uuid4().hex}
+ ref['blob'] = json.dumps(blob)
+ ref['type'] = 'ec2'
+ r = self.post(
+ '/credentials',
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+ # Assert credential id is same as hash of access key id for
+ # ec2 credentials
+ self.assertEqual(r.result['credential']['id'],
+ hashlib.sha256(blob['access']).hexdigest())
+ # Create second ec2 credential with the same access key id and check
+ # for conflict.
+ self.post(
+ '/credentials',
+ body={'credential': ref}, expected_status=409)
+
+ def test_get_ec2_dict_blob(self):
+ """Ensure non-JSON blob data is correctly converted."""
+ expected_blob, credential_id = self._create_dict_blob_credential()
+
+ r = self.get(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': credential_id})
+ self.assertEqual(expected_blob, r.result['credential']['blob'])
+
+ def test_list_ec2_dict_blob(self):
+ """Ensure non-JSON blob data is correctly converted."""
+ expected_blob, credential_id = self._create_dict_blob_credential()
+
+ list_r = self.get('/credentials')
+ list_creds = list_r.result['credentials']
+ list_ids = [r['id'] for r in list_creds]
+ self.assertIn(credential_id, list_ids)
+ for r in list_creds:
+ if r['id'] == credential_id:
+ self.assertEqual(expected_blob, r['blob'])
+
+ def test_create_non_ec2_credential(self):
+ """Call ``POST /credentials`` for creating non-ec2 credential."""
+ ref = self.new_credential_ref(user_id=self.user['id'])
+ blob = {"access": uuid.uuid4().hex,
+ "secret": uuid.uuid4().hex}
+ ref['blob'] = json.dumps(blob)
+ r = self.post(
+ '/credentials',
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+ # Assert credential id is not same as hash of access key id for
+ # non-ec2 credentials
+ self.assertNotEqual(r.result['credential']['id'],
+ hashlib.sha256(blob['access']).hexdigest())
+
+ def test_create_ec2_credential_with_missing_project_id(self):
+ """Call ``POST /credentials`` for creating ec2
+ credential with missing project_id.
+ """
+ ref = self.new_credential_ref(user_id=self.user['id'])
+ blob = {"access": uuid.uuid4().hex,
+ "secret": uuid.uuid4().hex}
+ ref['blob'] = json.dumps(blob)
+ ref['type'] = 'ec2'
+ # Assert 400 status for bad request with missing project_id
+ self.post(
+ '/credentials',
+ body={'credential': ref}, expected_status=400)
+
+ def test_create_ec2_credential_with_invalid_blob(self):
+ """Call ``POST /credentials`` for creating ec2
+ credential with invalid blob.
+ """
+ ref = self.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
+ ref['blob'] = '{"abc":"def"d}'
+ ref['type'] = 'ec2'
+ # Assert 400 status for bad request containing invalid
+ # blob
+ response = self.post(
+ '/credentials',
+ body={'credential': ref}, expected_status=400)
+ self.assertValidErrorResponse(response)
+
+ def test_create_credential_with_admin_token(self):
+ # Make sure we can create credential with the static admin token
+ ref = self.new_credential_ref(user_id=self.user['id'])
+ r = self.post(
+ '/credentials',
+ body={'credential': ref},
+ token=CONF.admin_token)
+ self.assertValidCredentialResponse(r, ref)
+
+
+class TestCredentialTrustScoped(test_v3.RestfulTestCase):
+ """Test credential with trust scoped token."""
+ def setUp(self):
+ super(TestCredentialTrustScoped, self).setUp()
+
+ self.trustee_user = self.new_user_ref(domain_id=self.domain_id)
+ password = self.trustee_user['password']
+ self.trustee_user = self.identity_api.create_user(self.trustee_user)
+ self.trustee_user['password'] = password
+ self.trustee_user_id = self.trustee_user['id']
+
+ def config_overrides(self):
+ super(TestCredentialTrustScoped, self).config_overrides()
+ self.config_fixture.config(group='trust', enabled=True)
+
+ def test_trust_scoped_ec2_credential(self):
+ """Call ``POST /credentials`` for creating ec2 credential."""
+ # Create the trust
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ # Get a trust scoped token
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_authenticate_token(auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ trust_id = r.result['token']['OS-TRUST:trust']['id']
+ token_id = r.headers.get('X-Subject-Token')
+
+ # Create the credential with the trust scoped token
+ ref = self.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
+ blob = {"access": uuid.uuid4().hex,
+ "secret": uuid.uuid4().hex}
+ ref['blob'] = json.dumps(blob)
+ ref['type'] = 'ec2'
+ r = self.post(
+ '/credentials',
+ body={'credential': ref},
+ token=token_id)
+
+ # We expect the response blob to contain the trust_id
+ ret_ref = ref.copy()
+ ret_blob = blob.copy()
+ ret_blob['trust_id'] = trust_id
+ ret_ref['blob'] = json.dumps(ret_blob)
+ self.assertValidCredentialResponse(r, ref=ret_ref)
+
+ # Assert credential id is same as hash of access key id for
+ # ec2 credentials
+ self.assertEqual(r.result['credential']['id'],
+ hashlib.sha256(blob['access']).hexdigest())
+
+ # Create second ec2 credential with the same access key id and check
+ # for conflict.
+ self.post(
+ '/credentials',
+ body={'credential': ref},
+ token=token_id,
+ expected_status=409)
+
+
+class TestCredentialEc2(CredentialBaseTestCase):
+ """Test v3 credential compatibility with ec2tokens."""
+ def setUp(self):
+ super(TestCredentialEc2, self).setUp()
+
+ def _validate_signature(self, access, secret):
+ """Test signature validation with the access/secret provided."""
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ # Now make a request to validate the signed dummy request via the
+ # ec2tokens API. This proves the v3 ec2 credentials actually work.
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ r = self.post(
+ '/ec2tokens',
+ body={'ec2Credentials': sig_ref},
+ expected_status=200)
+ self.assertValidTokenResponse(r)
+
+ def test_ec2_credential_signature_validate(self):
+ """Test signature validation with a v3 ec2 credential."""
+ ref = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ blob = {"access": uuid.uuid4().hex,
+ "secret": uuid.uuid4().hex}
+ ref['blob'] = json.dumps(blob)
+ ref['type'] = 'ec2'
+ r = self.post(
+ '/credentials',
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+ # Assert credential id is same as hash of access key id
+ self.assertEqual(r.result['credential']['id'],
+ hashlib.sha256(blob['access']).hexdigest())
+
+ cred_blob = json.loads(r.result['credential']['blob'])
+ self.assertEqual(blob, cred_blob)
+ self._validate_signature(access=cred_blob['access'],
+ secret=cred_blob['secret'])
+
+ def test_ec2_credential_signature_validate_legacy(self):
+ """Test signature validation with a legacy v3 ec2 credential."""
+ cred_json, credential_id = self._create_dict_blob_credential()
+ cred_blob = json.loads(cred_json)
+ self._validate_signature(access=cred_blob['access'],
+ secret=cred_blob['secret'])
+
+ def _get_ec2_cred_uri(self):
+ return '/users/%s/credentials/OS-EC2' % self.user_id
+
+ def _get_ec2_cred(self):
+ uri = self._get_ec2_cred_uri()
+ r = self.post(uri, body={'tenant_id': self.project_id})
+ return r.result['credential']
+
+ def test_ec2_create_credential(self):
+ """Test ec2 credential creation."""
+ ec2_cred = self._get_ec2_cred()
+ self.assertEqual(self.user_id, ec2_cred['user_id'])
+ self.assertEqual(self.project_id, ec2_cred['tenant_id'])
+ self.assertIsNone(ec2_cred['trust_id'])
+ self._validate_signature(access=ec2_cred['access'],
+ secret=ec2_cred['secret'])
+
+ return ec2_cred
+
+ def test_ec2_get_credential(self):
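+        """Test ec2 credential retrieval."""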
+ ec2_cred = self._get_ec2_cred()
+ uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
+ r = self.get(uri)
+ self.assertDictEqual(ec2_cred, r.result['credential'])
+
+ def test_ec2_list_credentials(self):
+ """Test ec2 credential listing."""
+ self._get_ec2_cred()
+ uri = self._get_ec2_cred_uri()
+ r = self.get(uri)
+ cred_list = r.result['credentials']
+ self.assertEqual(1, len(cred_list))
+
+ def test_ec2_delete_credential(self):
+ """Test ec2 credential deletion."""
+ ec2_cred = self._get_ec2_cred()
+ uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
+ cred_from_credential_api = (
+ self.credential_api
+ .list_credentials_for_user(self.user_id))
+ self.assertEqual(1, len(cred_from_credential_api))
+ self.delete(uri)
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ cred_from_credential_api[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py b/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
new file mode 100644
index 00000000..6f96f0e7
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
@@ -0,0 +1,210 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from oslo_config import cfg
+
+from keystone import exception
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+class DomainConfigTestCase(test_v3.RestfulTestCase):
+ """Test domain config support."""
+
+ def setUp(self):
+ super(DomainConfigTestCase, self).setUp()
+
+ self.domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(self.domain['id'], self.domain)
+ self.config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+
+ def test_create_config(self):
+ """Call ``PUT /domains/{domain_id}/config``."""
+ url = '/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']}
+ r = self.put(url, body={'config': self.config},
+ expected_status=201)
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(self.config, r.result['config'])
+ self.assertEqual(self.config, res)
+
+ def test_create_config_twice(self):
+ """Check multiple creates don't throw error"""
+ self.put('/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']},
+ body={'config': self.config},
+ expected_status=201)
+ self.put('/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']},
+ body={'config': self.config},
+ expected_status=200)
+
+ def test_delete_config(self):
+ """Call ``DELETE /domains{domain_id}/config``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ self.delete('/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']})
+ self.get('/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']},
+ expected_status=exception.DomainConfigNotFound.code)
+
+ def test_delete_config_by_group(self):
+ """Call ``DELETE /domains{domain_id}/config/{group}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ self.delete('/domains/%(domain_id)s/config/ldap' % {
+ 'domain_id': self.domain['id']})
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertNotIn('ldap', res)
+
+ def test_get_head_config(self):
+ """Call ``GET & HEAD for /domains{domain_id}/config``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ url = '/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']}
+ r = self.get(url)
+ self.assertEqual(self.config, r.result['config'])
+ self.head(url, expected_status=200)
+
+ def test_get_config_by_group(self):
+ """Call ``GET & HEAD /domains{domain_id}/config/{group}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ url = '/domains/%(domain_id)s/config/ldap' % {
+ 'domain_id': self.domain['id']}
+ r = self.get(url)
+ self.assertEqual({'ldap': self.config['ldap']}, r.result['config'])
+ self.head(url, expected_status=200)
+
+ def test_get_config_by_option(self):
+ """Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ url = '/domains/%(domain_id)s/config/ldap/url' % {
+ 'domain_id': self.domain['id']}
+ r = self.get(url)
+ self.assertEqual({'url': self.config['ldap']['url']},
+ r.result['config'])
+ self.head(url, expected_status=200)
+
+ def test_get_non_existant_config(self):
+ """Call ``GET /domains{domain_id}/config when no config defined``."""
+ self.get('/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']}, expected_status=404)
+
+ def test_get_non_existant_config_group(self):
+ """Call ``GET /domains{domain_id}/config/{group_not_exist}``."""
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ self.get('/domains/%(domain_id)s/config/identity' % {
+ 'domain_id': self.domain['id']}, expected_status=404)
+
+ def test_get_non_existant_config_option(self):
+ """Call ``GET /domains{domain_id}/config/group/{option_not_exist}``."""
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ self.get('/domains/%(domain_id)s/config/ldap/user_tree_dn' % {
+ 'domain_id': self.domain['id']}, expected_status=404)
+
+ def test_update_config(self):
+ """Call ``PATCH /domains/{domain_id}/config``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ new_config = {'ldap': {'url': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ r = self.patch('/domains/%(domain_id)s/config' % {
+ 'domain_id': self.domain['id']},
+ body={'config': new_config})
+ res = self.domain_config_api.get_config(self.domain['id'])
+ expected_config = copy.deepcopy(self.config)
+ expected_config['ldap']['url'] = new_config['ldap']['url']
+ expected_config['identity']['driver'] = (
+ new_config['identity']['driver'])
+ self.assertEqual(expected_config, r.result['config'])
+ self.assertEqual(expected_config, res)
+
+ def test_update_config_group(self):
+ """Call ``PATCH /domains/{domain_id}/config/{group}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ new_config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_filter': uuid.uuid4().hex}}
+ r = self.patch('/domains/%(domain_id)s/config/ldap' % {
+ 'domain_id': self.domain['id']},
+ body={'config': new_config})
+ res = self.domain_config_api.get_config(self.domain['id'])
+ expected_config = copy.deepcopy(self.config)
+ expected_config['ldap']['url'] = new_config['ldap']['url']
+ expected_config['ldap']['user_filter'] = (
+ new_config['ldap']['user_filter'])
+ self.assertEqual(expected_config, r.result['config'])
+ self.assertEqual(expected_config, res)
+
+ def test_update_config_invalid_group(self):
+ """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+
+        # Trying to update a group that is neither whitelisted nor sensitive
+ # should result in Forbidden.
+ invalid_group = uuid.uuid4().hex
+ new_config = {invalid_group: {'url': uuid.uuid4().hex,
+ 'user_filter': uuid.uuid4().hex}}
+ self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % {
+ 'domain_id': self.domain['id'], 'invalid_group': invalid_group},
+ body={'config': new_config},
+ expected_status=403)
+ # Trying to update a valid group, but one that is not in the current
+        # config should result in NotFound.
+ config = {'ldap': {'suffix': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ new_config = {'identity': {'driver': uuid.uuid4().hex}}
+ self.patch('/domains/%(domain_id)s/config/identity' % {
+ 'domain_id': self.domain['id']},
+ body={'config': new_config},
+ expected_status=404)
+
+ def test_update_config_option(self):
+ """Call ``PATCH /domains/{domain_id}/config/{group}/{option}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ new_config = {'url': uuid.uuid4().hex}
+ r = self.patch('/domains/%(domain_id)s/config/ldap/url' % {
+ 'domain_id': self.domain['id']},
+ body={'config': new_config})
+ res = self.domain_config_api.get_config(self.domain['id'])
+ expected_config = copy.deepcopy(self.config)
+ expected_config['ldap']['url'] = new_config['url']
+ self.assertEqual(expected_config, r.result['config'])
+ self.assertEqual(expected_config, res)
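+        # Hedged reading of the assertions above: a PATCH at option
+        # granularity merges into the stored config rather than replacing
+        # it, so only ldap.url changes and every other option survives.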
+
+ def test_update_config_invalid_option(self):
+ """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``."""
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_option = uuid.uuid4().hex
+ new_config = {'ldap': {invalid_option: uuid.uuid4().hex}}
+        # Trying to update an option that is neither whitelisted nor sensitive
+ # should result in Forbidden.
+ self.patch(
+ '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % {
+ 'domain_id': self.domain['id'],
+ 'invalid_option': invalid_option},
+ body={'config': new_config},
+ expected_status=403)
+ # Trying to update a valid option, but one that is not in the current
+        # config should result in NotFound.
+ new_config = {'suffix': uuid.uuid4().hex}
+ self.patch(
+ '/domains/%(domain_id)s/config/ldap/suffix' % {
+ 'domain_id': self.domain['id']},
+ body={'config': new_config},
+ expected_status=404)
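+        # A hedged summary of the two invalid-update tests: an option or
+        # group outside the whitelisted/sensitive registry yields 403
+        # Forbidden, while a valid one merely absent from the stored
+        # config yields 404 Not Found.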
diff --git a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
new file mode 100644
index 00000000..437fb155
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
@@ -0,0 +1,251 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from testtools import matchers
+
+from keystone.tests.unit import test_v3
+
+
+class TestExtensionCase(test_v3.RestfulTestCase):
+
+ EXTENSION_NAME = 'endpoint_policy'
+ EXTENSION_TO_ADD = 'endpoint_policy_extension'
+
+
+class EndpointPolicyTestCase(TestExtensionCase):
+ """Test endpoint policy CRUD.
+
+    In general, the controller layer of the endpoint policy extension simply
+    marshals data to and from the underlying manager calls. Given that
+ the manager layer is tested in depth by the backend tests, the tests we
+ execute here concentrate on ensuring we are correctly passing and
+ presenting the data.
+
+ """
+
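+    # The association URLs exercised below come in three granularities
+    # (a hedged summary of the tests that follow):
+    #   /policies/{policy_id}/OS-ENDPOINT-POLICY/endpoints/{endpoint_id}
+    #   /policies/{policy_id}/OS-ENDPOINT-POLICY/services/{service_id}
+    #   /policies/{policy_id}/OS-ENDPOINT-POLICY/services/{service_id}
+    #       /regions/{region_id}
+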
+ def setUp(self):
+ super(EndpointPolicyTestCase, self).setUp()
+ self.policy = self.new_policy_ref()
+ self.policy_api.create_policy(self.policy['id'], self.policy)
+ self.service = self.new_service_ref()
+ self.catalog_api.create_service(self.service['id'], self.service)
+ self.endpoint = self.new_endpoint_ref(self.service['id'], enabled=True)
+ self.catalog_api.create_endpoint(self.endpoint['id'], self.endpoint)
+ self.region = self.new_region_ref()
+ self.catalog_api.create_region(self.region)
+
+ def assert_head_and_get_return_same_response(self, url, expected_status):
+ self.get(url, expected_status=expected_status)
+ self.head(url, expected_status=expected_status)
+
+ # endpoint policy crud tests
+ def _crud_test(self, url):
+        # Testing before the resource exists also ensures that there is
+        # no false negative after creation.
+
+ self.assert_head_and_get_return_same_response(url, expected_status=404)
+
+ self.put(url, expected_status=204)
+
+ # test that the new resource is accessible.
+ self.assert_head_and_get_return_same_response(url, expected_status=204)
+
+ self.delete(url, expected_status=204)
+
+ # test that the deleted resource is no longer accessible
+ self.assert_head_and_get_return_same_response(url, expected_status=404)
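+        # Hedged note: these association resources carry no body, which is
+        # why a successful GET, like HEAD and PUT, is expected to return
+        # 204 rather than 200 throughout this lifecycle.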
+
+ def test_crud_for_policy_for_explicit_endpoint(self):
+ """PUT, HEAD and DELETE for explicit endpoint policy."""
+
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/endpoints/%(endpoint_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'endpoint_id': self.endpoint['id']}
+ self._crud_test(url)
+
+ def test_crud_for_policy_for_service(self):
+ """PUT, HEAD and DELETE for service endpoint policy."""
+
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/services/%(service_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'service_id': self.service['id']}
+ self._crud_test(url)
+
+ def test_crud_for_policy_for_region_and_service(self):
+ """PUT, HEAD and DELETE for region and service endpoint policy."""
+
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/services/%(service_id)s/regions/%(region_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'service_id': self.service['id'],
+ 'region_id': self.region['id']}
+ self._crud_test(url)
+
+ def test_get_policy_for_endpoint(self):
+ """GET /endpoints/{endpoint_id}/policy."""
+
+ self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/endpoints/%(endpoint_id)s' % {
+ 'policy_id': self.policy['id'],
+ 'endpoint_id': self.endpoint['id']},
+ expected_status=204)
+
+ self.head('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
+ '/policy' % {
+ 'endpoint_id': self.endpoint['id']},
+ expected_status=200)
+
+ r = self.get('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
+ '/policy' % {
+ 'endpoint_id': self.endpoint['id']},
+ expected_status=200)
+ self.assertValidPolicyResponse(r, ref=self.policy)
+
+ def test_list_endpoints_for_policy(self):
+ """GET /policies/%(policy_id}/endpoints."""
+
+ self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/endpoints/%(endpoint_id)s' % {
+ 'policy_id': self.policy['id'],
+ 'endpoint_id': self.endpoint['id']},
+ expected_status=204)
+
+ r = self.get('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/endpoints' % {
+ 'policy_id': self.policy['id']},
+ expected_status=200)
+ self.assertValidEndpointListResponse(r, ref=self.endpoint)
+ self.assertThat(r.result.get('endpoints'), matchers.HasLength(1))
+
+ def test_endpoint_association_cleanup_when_endpoint_deleted(self):
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/endpoints/%(endpoint_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'endpoint_id': self.endpoint['id']}
+
+ self.put(url, expected_status=204)
+ self.head(url, expected_status=204)
+
+ self.delete('/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint['id']})
+
+ self.head(url, expected_status=404)
+
+ def test_region_service_association_cleanup_when_region_deleted(self):
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/services/%(service_id)s/regions/%(region_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'service_id': self.service['id'],
+ 'region_id': self.region['id']}
+
+ self.put(url, expected_status=204)
+ self.head(url, expected_status=204)
+
+ self.delete('/regions/%(region_id)s' % {
+ 'region_id': self.region['id']})
+
+ self.head(url, expected_status=404)
+
+ def test_region_service_association_cleanup_when_service_deleted(self):
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/services/%(service_id)s/regions/%(region_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'service_id': self.service['id'],
+ 'region_id': self.region['id']}
+
+ self.put(url, expected_status=204)
+ self.head(url, expected_status=204)
+
+ self.delete('/services/%(service_id)s' % {
+ 'service_id': self.service['id']})
+
+ self.head(url, expected_status=404)
+
+    def test_service_association_cleanup_when_policy_deleted(self):
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/services/%(service_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'service_id': self.service['id']}
+
+ self.put(url, expected_status=204)
+ self.get(url, expected_status=204)
+
+ self.delete('/policies/%(policy_id)s' % {
+ 'policy_id': self.policy['id']})
+
+ self.head(url, expected_status=404)
+
+    def test_service_association_cleanup_when_service_deleted(self):
+ url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
+ '/services/%(service_id)s') % {
+ 'policy_id': self.policy['id'],
+ 'service_id': self.service['id']}
+
+ self.put(url, expected_status=204)
+ self.get(url, expected_status=204)
+
+ self.delete('/services/%(service_id)s' % {
+ 'service_id': self.service['id']})
+
+ self.head(url, expected_status=404)
+
+
+class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
+ EXTENSION_LOCATION = ('http://docs.openstack.org/api/openstack-identity/3/'
+ 'ext/OS-ENDPOINT-POLICY/1.0/rel')
+ PARAM_LOCATION = 'http://docs.openstack.org/api/openstack-identity/3/param'
+
+ JSON_HOME_DATA = {
+ EXTENSION_LOCATION + '/endpoint_policy': {
+ 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/'
+ 'policy',
+ 'href-vars': {
+ 'endpoint_id': PARAM_LOCATION + '/endpoint_id',
+ },
+ },
+ EXTENSION_LOCATION + '/policy_endpoints': {
+ 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
+ 'endpoints',
+ 'href-vars': {
+ 'policy_id': PARAM_LOCATION + '/policy_id',
+ },
+ },
+ EXTENSION_LOCATION + '/endpoint_policy_association': {
+ 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
+ 'endpoints/{endpoint_id}',
+ 'href-vars': {
+ 'policy_id': PARAM_LOCATION + '/policy_id',
+ 'endpoint_id': PARAM_LOCATION + '/endpoint_id',
+ },
+ },
+ EXTENSION_LOCATION + '/service_policy_association': {
+ 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
+ 'services/{service_id}',
+ 'href-vars': {
+ 'policy_id': PARAM_LOCATION + '/policy_id',
+ 'service_id': PARAM_LOCATION + '/service_id',
+ },
+ },
+ EXTENSION_LOCATION + '/region_and_service_policy_association': {
+ 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
+ 'services/{service_id}/regions/{region_id}',
+ 'href-vars': {
+ 'policy_id': PARAM_LOCATION + '/policy_id',
+ 'service_id': PARAM_LOCATION + '/service_id',
+ 'region_id': PARAM_LOCATION + '/region_id',
+ },
+ },
+ }
diff --git a/keystone-moon/keystone/tests/unit/test_v3_federation.py b/keystone-moon/keystone/tests/unit/test_v3_federation.py
new file mode 100644
index 00000000..3b6f4d8b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_federation.py
@@ -0,0 +1,3296 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import random
+import subprocess
+import uuid
+
+from lxml import etree
+import mock
+from oslo_config import cfg
+from oslo_log import log
+from oslo_serialization import jsonutils
+from oslotest import mockpatch
+import saml2
+from saml2 import saml
+from saml2 import sigver
+from six.moves import urllib
+import xmldsig
+
+from keystone.auth import controllers as auth_controllers
+from keystone.auth.plugins import mapped
+from keystone.contrib import federation
+from keystone.contrib.federation import controllers as federation_controllers
+from keystone.contrib.federation import idp as keystone_idp
+from keystone.contrib.federation import utils as mapping_utils
+from keystone import exception
+from keystone import notifications
+from keystone.tests.unit import core
+from keystone.tests.unit import federation_fixtures
+from keystone.tests.unit import ksfixtures
+from keystone.tests.unit import mapping_fixtures
+from keystone.tests.unit import test_v3
+from keystone.token.providers import common as token_common
+
+
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)
+ROOTDIR = os.path.dirname(os.path.abspath(__file__))
+XMLDIR = os.path.join(ROOTDIR, 'saml2/')
+
+
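+# dummy_validator satisfies the validator argument of assertValidResponse
+# when the keys_to_check/ref comparison is all a test needs; it
+# deliberately accepts anything.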
+def dummy_validator(*args, **kwargs):
+ pass
+
+
+class FederationTests(test_v3.RestfulTestCase):
+
+ EXTENSION_NAME = 'federation'
+ EXTENSION_TO_ADD = 'federation_extension'
+
+
+class FederatedSetupMixin(object):
+
+ ACTION = 'authenticate'
+ IDP = 'ORG_IDP'
+ PROTOCOL = 'saml2'
+ AUTH_METHOD = 'saml2'
+ USER = 'user@ORGANIZATION'
+ ASSERTION_PREFIX = 'PREFIX_'
+ IDP_WITH_REMOTE = 'ORG_IDP_REMOTE'
+ REMOTE_ID = 'entityID_IDP'
+ REMOTE_ID_ATTR = uuid.uuid4().hex
+
+ UNSCOPED_V3_SAML2_REQ = {
+ "identity": {
+ "methods": [AUTH_METHOD],
+ AUTH_METHOD: {
+ "identity_provider": IDP,
+ "protocol": PROTOCOL
+ }
+ }
+ }
+
+ def _check_domains_are_valid(self, token):
+ self.assertEqual('Federated', token['user']['domain']['id'])
+ self.assertEqual('Federated', token['user']['domain']['name'])
+
+ def _project(self, project):
+ return (project['id'], project['name'])
+
+ def _roles(self, roles):
+ return set([(r['id'], r['name']) for r in roles])
+
+ def _check_projects_and_roles(self, token, roles, projects):
+ """Check whether the projects and the roles match."""
+ token_roles = token.get('roles')
+ if token_roles is None:
+ raise AssertionError('Roles not found in the token')
+ token_roles = self._roles(token_roles)
+ roles_ref = self._roles(roles)
+ self.assertEqual(token_roles, roles_ref)
+
+ token_projects = token.get('project')
+ if token_projects is None:
+ raise AssertionError('Projects not found in the token')
+ token_projects = self._project(token_projects)
+ projects_ref = self._project(projects)
+ self.assertEqual(token_projects, projects_ref)
+
+ def _check_scoped_token_attributes(self, token):
+ def xor_project_domain(iterable):
+ return sum(('project' in iterable, 'domain' in iterable)) % 2
+
+ for obj in ('user', 'catalog', 'expires_at', 'issued_at',
+ 'methods', 'roles'):
+ self.assertIn(obj, token)
+ # Check for either project or domain
+ if not xor_project_domain(token.keys()):
+            raise AssertionError("You must specify either "
+                                 "project or domain.")
+
+ self.assertIn('OS-FEDERATION', token['user'])
+ os_federation = token['user']['OS-FEDERATION']
+ self.assertEqual(self.IDP, os_federation['identity_provider']['id'])
+ self.assertEqual(self.PROTOCOL, os_federation['protocol']['id'])
+
+ def _issue_unscoped_token(self,
+ idp=None,
+ assertion='EMPLOYEE_ASSERTION',
+ environment=None):
+ api = federation_controllers.Auth()
+ context = {'environment': environment or {}}
+ self._inject_assertion(context, assertion)
+ if idp is None:
+ idp = self.IDP
+ r = api.federated_authentication(context, idp, self.PROTOCOL)
+ return r
+
+ def idp_ref(self, id=None):
+ idp = {
+ 'id': id or uuid.uuid4().hex,
+ 'enabled': True,
+ 'description': uuid.uuid4().hex
+ }
+ return idp
+
+ def proto_ref(self, mapping_id=None):
+ proto = {
+ 'id': uuid.uuid4().hex,
+ 'mapping_id': mapping_id or uuid.uuid4().hex
+ }
+ return proto
+
+ def mapping_ref(self, rules=None):
+ return {
+ 'id': uuid.uuid4().hex,
+ 'rules': rules or self.rules['rules']
+ }
+
+ def _scope_request(self, unscoped_token_id, scope, scope_id):
+ return {
+ 'auth': {
+ 'identity': {
+ 'methods': [
+ self.AUTH_METHOD
+ ],
+ self.AUTH_METHOD: {
+ 'id': unscoped_token_id
+ }
+ },
+ 'scope': {
+ scope: {
+ 'id': scope_id
+ }
+ }
+ }
+ }
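+
+    # For example (sketch), _scope_request(tok, 'project', pid) builds:
+    #     {'auth': {'identity': {'methods': ['saml2'],
+    #                            'saml2': {'id': tok}},
+    #               'scope': {'project': {'id': pid}}}}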
+
+ def _inject_assertion(self, context, variant, query_string=None):
+ assertion = getattr(mapping_fixtures, variant)
+ context['environment'].update(assertion)
+ context['query_string'] = query_string or []
+
+ def load_federation_sample_data(self):
+ """Inject additional data."""
+
+ # Create and add domains
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'],
+ self.domainA)
+
+ self.domainB = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'],
+ self.domainB)
+
+ self.domainC = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainC['id'],
+ self.domainC)
+
+ self.domainD = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainD['id'],
+ self.domainD)
+
+ # Create and add projects
+ self.proj_employees = self.new_project_ref(
+ domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.proj_employees['id'],
+ self.proj_employees)
+ self.proj_customers = self.new_project_ref(
+ domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.proj_customers['id'],
+ self.proj_customers)
+
+ self.project_all = self.new_project_ref(
+ domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.project_all['id'],
+ self.project_all)
+
+ self.project_inherited = self.new_project_ref(
+ domain_id=self.domainD['id'])
+ self.resource_api.create_project(self.project_inherited['id'],
+ self.project_inherited)
+
+ # Create and add groups
+ self.group_employees = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.group_employees = (
+ self.identity_api.create_group(self.group_employees))
+
+ self.group_customers = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.group_customers = (
+ self.identity_api.create_group(self.group_customers))
+
+ self.group_admins = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.group_admins = self.identity_api.create_group(self.group_admins)
+
+ # Create and add roles
+ self.role_employee = self.new_role_ref()
+ self.role_api.create_role(self.role_employee['id'], self.role_employee)
+ self.role_customer = self.new_role_ref()
+ self.role_api.create_role(self.role_customer['id'], self.role_customer)
+
+ self.role_admin = self.new_role_ref()
+ self.role_api.create_role(self.role_admin['id'], self.role_admin)
+
+ # Employees can access
+ # * proj_employees
+ # * project_all
+ self.assignment_api.create_grant(self.role_employee['id'],
+ group_id=self.group_employees['id'],
+ project_id=self.proj_employees['id'])
+ self.assignment_api.create_grant(self.role_employee['id'],
+ group_id=self.group_employees['id'],
+ project_id=self.project_all['id'])
+ # Customers can access
+ # * proj_customers
+ self.assignment_api.create_grant(self.role_customer['id'],
+ group_id=self.group_customers['id'],
+ project_id=self.proj_customers['id'])
+
+ # Admins can access:
+ # * proj_customers
+ # * proj_employees
+ # * project_all
+ self.assignment_api.create_grant(self.role_admin['id'],
+ group_id=self.group_admins['id'],
+ project_id=self.proj_customers['id'])
+ self.assignment_api.create_grant(self.role_admin['id'],
+ group_id=self.group_admins['id'],
+ project_id=self.proj_employees['id'])
+ self.assignment_api.create_grant(self.role_admin['id'],
+ group_id=self.group_admins['id'],
+ project_id=self.project_all['id'])
+
+ # Customers can access:
+ # * domain A
+ self.assignment_api.create_grant(self.role_customer['id'],
+ group_id=self.group_customers['id'],
+ domain_id=self.domainA['id'])
+
+ # Customers can access projects via inheritance:
+ # * domain D
+ self.assignment_api.create_grant(self.role_customer['id'],
+ group_id=self.group_customers['id'],
+ domain_id=self.domainD['id'],
+ inherited_to_projects=True)
+
+ # Employees can access:
+ # * domain A
+ # * domain B
+
+ self.assignment_api.create_grant(self.role_employee['id'],
+ group_id=self.group_employees['id'],
+ domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.role_employee['id'],
+ group_id=self.group_employees['id'],
+ domain_id=self.domainB['id'])
+
+ # Admins can access:
+ # * domain A
+ # * domain B
+ # * domain C
+ self.assignment_api.create_grant(self.role_admin['id'],
+ group_id=self.group_admins['id'],
+ domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.role_admin['id'],
+ group_id=self.group_admins['id'],
+ domain_id=self.domainB['id'])
+
+ self.assignment_api.create_grant(self.role_admin['id'],
+ group_id=self.group_admins['id'],
+ domain_id=self.domainC['id'])
+ self.rules = {
+ 'rules': [
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': self.group_employees['id']
+ }
+ },
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': 'UserName'
+ },
+ {
+ 'type': 'orgPersonType',
+ 'any_one_of': [
+ 'Employee'
+ ]
+ }
+ ]
+ },
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': self.group_employees['id']
+ }
+ },
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': self.ASSERTION_PREFIX + 'UserName'
+ },
+ {
+ 'type': self.ASSERTION_PREFIX + 'orgPersonType',
+ 'any_one_of': [
+ 'SuperEmployee'
+ ]
+ }
+ ]
+ },
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': self.group_customers['id']
+ }
+ },
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': 'UserName'
+ },
+ {
+ 'type': 'orgPersonType',
+ 'any_one_of': [
+ 'Customer'
+ ]
+ }
+ ]
+ },
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': self.group_admins['id']
+ }
+ },
+ {
+ 'group': {
+ 'id': self.group_employees['id']
+ }
+ },
+ {
+ 'group': {
+ 'id': self.group_customers['id']
+ }
+ },
+
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': 'UserName'
+ },
+ {
+ 'type': 'orgPersonType',
+ 'any_one_of': [
+ 'Admin',
+ 'Chief'
+ ]
+ }
+ ]
+ },
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': uuid.uuid4().hex
+ }
+ },
+ {
+ 'group': {
+ 'id': self.group_customers['id']
+ }
+ },
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': 'UserName',
+ },
+ {
+ 'type': 'FirstName',
+ 'any_one_of': [
+ 'Jill'
+ ]
+ },
+ {
+ 'type': 'LastName',
+ 'any_one_of': [
+ 'Smith'
+ ]
+ }
+ ]
+ },
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': 'this_group_no_longer_exists'
+ }
+ },
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': 'UserName',
+ },
+ {
+ 'type': 'Email',
+ 'any_one_of': [
+ 'testacct@example.com'
+ ]
+ },
+ {
+ 'type': 'orgPersonType',
+ 'any_one_of': [
+ 'Tester'
+ ]
+ }
+ ]
+ },
+ # rules with local group names
+ {
+ "local": [
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ },
+ {
+ "group": {
+ "name": self.group_customers['name'],
+ "domain": {
+ "name": self.domainA['name']
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ 'type': 'UserName',
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "CEO",
+ "CTO"
+ ],
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ },
+ {
+ "group": {
+ "name": self.group_admins['name'],
+ "domain": {
+ "id": self.domainA['id']
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName",
+ },
+ {
+ "type": "orgPersonType",
+ "any_one_of": [
+ "Managers"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}"
+ }
+ },
+ {
+ "group": {
+ "name": "NON_EXISTING",
+ "domain": {
+ "id": self.domainA['id']
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName",
+ },
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "IamTester"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "type": "local",
+ "name": self.user['name'],
+ "domain": {
+ "id": self.user['domain_id']
+ }
+ }
+ },
+ {
+ "group": {
+ "id": self.group_customers['id']
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserType",
+ "any_one_of": [
+ "random"
+ ]
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "user": {
+ "type": "local",
+ "name": self.user['name'],
+ "domain": {
+ "id": uuid.uuid4().hex
+ }
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "Position",
+ "any_one_of": [
+ "DirectorGeneral"
+ ]
+ }
+ ]
+ }
+ ]
+ }
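+        # A hedged summary of the rules above: each maps remote assertion
+        # attributes (UserName, orgPersonType, FirstName/LastName, Email,
+        # Position, etc.) onto local users and groups; several deliberately
+        # reference nonexistent groups or domains to exercise error paths.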
+
+ # Add IDP
+ self.idp = self.idp_ref(id=self.IDP)
+ self.federation_api.create_idp(self.idp['id'],
+ self.idp)
+ # Add IDP with remote
+ self.idp_with_remote = self.idp_ref(id=self.IDP_WITH_REMOTE)
+ self.idp_with_remote['remote_id'] = self.REMOTE_ID
+ self.federation_api.create_idp(self.idp_with_remote['id'],
+ self.idp_with_remote)
+ # Add a mapping
+ self.mapping = self.mapping_ref()
+ self.federation_api.create_mapping(self.mapping['id'],
+ self.mapping)
+ # Add protocols
+ self.proto_saml = self.proto_ref(mapping_id=self.mapping['id'])
+ self.proto_saml['id'] = self.PROTOCOL
+ self.federation_api.create_protocol(self.idp['id'],
+ self.proto_saml['id'],
+ self.proto_saml)
+ # Add protocols IDP with remote
+ self.federation_api.create_protocol(self.idp_with_remote['id'],
+ self.proto_saml['id'],
+ self.proto_saml)
+ # Generate fake tokens
+ context = {'environment': {}}
+
+ self.tokens = {}
+ VARIANTS = ('EMPLOYEE_ASSERTION', 'CUSTOMER_ASSERTION',
+ 'ADMIN_ASSERTION')
+ api = auth_controllers.Auth()
+ for variant in VARIANTS:
+ self._inject_assertion(context, variant)
+ r = api.authenticate_for_token(context, self.UNSCOPED_V3_SAML2_REQ)
+ self.tokens[variant] = r.headers.get('X-Subject-Token')
+
+ self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN = self._scope_request(
+ uuid.uuid4().hex, 'project', self.proj_customers['id'])
+
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE = self._scope_request(
+ self.tokens['EMPLOYEE_ASSERTION'], 'project',
+ self.proj_employees['id'])
+
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN = self._scope_request(
+ self.tokens['ADMIN_ASSERTION'], 'project',
+ self.proj_employees['id'])
+
+ self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN = self._scope_request(
+ self.tokens['ADMIN_ASSERTION'], 'project',
+ self.proj_customers['id'])
+
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER = self._scope_request(
+ self.tokens['CUSTOMER_ASSERTION'], 'project',
+ self.proj_employees['id'])
+
+ self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER = self._scope_request(
+ self.tokens['CUSTOMER_ASSERTION'], 'project',
+ self.project_inherited['id'])
+
+ self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER = self._scope_request(
+ self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainA['id'])
+
+ self.TOKEN_SCOPE_DOMAIN_B_FROM_CUSTOMER = self._scope_request(
+ self.tokens['CUSTOMER_ASSERTION'], 'domain',
+ self.domainB['id'])
+
+ self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER = self._scope_request(
+ self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainD['id'])
+
+ self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN = self._scope_request(
+ self.tokens['ADMIN_ASSERTION'], 'domain', self.domainA['id'])
+
+ self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN = self._scope_request(
+ self.tokens['ADMIN_ASSERTION'], 'domain', self.domainB['id'])
+
+ self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN = self._scope_request(
+ self.tokens['ADMIN_ASSERTION'], 'domain',
+ self.domainC['id'])
+
+
+class FederatedIdentityProviderTests(FederationTests):
+ """A test class for Identity Providers."""
+
+ idp_keys = ['description', 'enabled']
+
+ default_body = {'description': None, 'enabled': True}
+
+ def base_url(self, suffix=None):
+ if suffix is not None:
+ return '/OS-FEDERATION/identity_providers/' + str(suffix)
+ return '/OS-FEDERATION/identity_providers'
+
+ def _fetch_attribute_from_response(self, resp, parameter,
+ assert_is_not_none=True):
+ """Fetch single attribute from TestResponse object."""
+ result = resp.result.get(parameter)
+ if assert_is_not_none:
+ self.assertIsNotNone(result)
+ return result
+
+ def _create_and_decapsulate_response(self, body=None):
+ """Create IdP and fetch it's random id along with entity."""
+ default_resp = self._create_default_idp(body=body)
+ idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ self.assertIsNotNone(idp)
+ idp_id = idp.get('id')
+ return (idp_id, idp)
+
+ def _get_idp(self, idp_id):
+ """Fetch IdP entity based on its id."""
+ url = self.base_url(suffix=idp_id)
+ resp = self.get(url)
+ return resp
+
+ def _create_default_idp(self, body=None):
+ """Create default IdP."""
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ if body is None:
+ body = self._http_idp_input()
+ resp = self.put(url, body={'identity_provider': body},
+ expected_status=201)
+ return resp
+
+ def _http_idp_input(self, **kwargs):
+ """Create default input for IdP data."""
+ body = None
+ if 'body' not in kwargs:
+ body = self.default_body.copy()
+ body['description'] = uuid.uuid4().hex
+ else:
+ body = kwargs['body']
+ return body
+
+ def _assign_protocol_to_idp(self, idp_id=None, proto=None, url=None,
+ mapping_id=None, validate=True, **kwargs):
+ if url is None:
+ url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
+ if idp_id is None:
+ idp_id, _ = self._create_and_decapsulate_response()
+ if proto is None:
+ proto = uuid.uuid4().hex
+ if mapping_id is None:
+ mapping_id = uuid.uuid4().hex
+ body = {'mapping_id': mapping_id}
+ url = url % {'idp_id': idp_id, 'protocol_id': proto}
+ resp = self.put(url, body={'protocol': body}, **kwargs)
+ if validate:
+ self.assertValidResponse(resp, 'protocol', dummy_validator,
+ keys_to_check=['id', 'mapping_id'],
+ ref={'id': proto,
+ 'mapping_id': mapping_id})
+ return (resp, idp_id, proto)
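+
+    # Typical usage (sketch, mirroring the protocol tests below): assign a
+    # 'saml2' protocol to a fresh IdP and expect HTTP 201:
+    #     resp, idp_id, proto = self._assign_protocol_to_idp(
+    #         proto='saml2', expected_status=201)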
+
+ def _get_protocol(self, idp_id, protocol_id):
+ url = "%s/protocols/%s" % (idp_id, protocol_id)
+ url = self.base_url(suffix=url)
+ r = self.get(url)
+ return r
+
+ def test_create_idp(self):
+ """Creates the IdentityProvider entity."""
+
+ keys_to_check = self.idp_keys
+ body = self._http_idp_input()
+ resp = self._create_default_idp(body=body)
+ self.assertValidResponse(resp, 'identity_provider', dummy_validator,
+ keys_to_check=keys_to_check,
+ ref=body)
+
+ def test_create_idp_remote(self):
+ """Creates the IdentityProvider entity associated to a remote_id."""
+
+ keys_to_check = list(self.idp_keys)
+ keys_to_check.append('remote_id')
+ body = self.default_body.copy()
+ body['description'] = uuid.uuid4().hex
+ body['remote_id'] = uuid.uuid4().hex
+ resp = self._create_default_idp(body=body)
+ self.assertValidResponse(resp, 'identity_provider', dummy_validator,
+ keys_to_check=keys_to_check,
+ ref=body)
+
+ def test_list_idps(self, iterations=5):
+ """Lists all available IdentityProviders.
+
+        This test collects the ids of created IdPs and
+        intersects them with the list of all available IdPs.
+        The list of all IdPs can be a superset of the IdPs created in this
+        test, because other tests also create IdPs.
+
+ """
+ def get_id(resp):
+ r = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ return r.get('id')
+
+ ids = []
+ for _ in range(iterations):
+ id = get_id(self._create_default_idp())
+ ids.append(id)
+ ids = set(ids)
+
+ keys_to_check = self.idp_keys
+ url = self.base_url()
+ resp = self.get(url)
+ self.assertValidListResponse(resp, 'identity_providers',
+ dummy_validator,
+ keys_to_check=keys_to_check)
+ entities = self._fetch_attribute_from_response(resp,
+ 'identity_providers')
+ entities_ids = set([e['id'] for e in entities])
+ ids_intersection = entities_ids.intersection(ids)
+ self.assertEqual(ids_intersection, ids)
+
+ def test_check_idp_uniqueness(self):
+ """Add same IdP twice.
+
+ Expect HTTP 409 code for the latter call.
+
+ """
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ body = self._http_idp_input()
+ self.put(url, body={'identity_provider': body},
+ expected_status=201)
+ self.put(url, body={'identity_provider': body},
+ expected_status=409)
+
+ def test_get_idp(self):
+ """Create and later fetch IdP."""
+ body = self._http_idp_input()
+ default_resp = self._create_default_idp(body=body)
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ url = self.base_url(suffix=idp_id)
+ resp = self.get(url)
+ self.assertValidResponse(resp, 'identity_provider',
+ dummy_validator, keys_to_check=body.keys(),
+ ref=body)
+
+ def test_get_nonexisting_idp(self):
+ """Fetch nonexisting IdP entity.
+
+        Expect HTTP 404 status code.
+
+ """
+ idp_id = uuid.uuid4().hex
+ self.assertIsNotNone(idp_id)
+
+ url = self.base_url(suffix=idp_id)
+ self.get(url, expected_status=404)
+
+ def test_delete_existing_idp(self):
+ """Create and later delete IdP.
+
+ Expect HTTP 404 for the GET IdP call.
+ """
+ default_resp = self._create_default_idp()
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ self.assertIsNotNone(idp_id)
+ url = self.base_url(suffix=idp_id)
+ self.delete(url)
+ self.get(url, expected_status=404)
+
+ def test_delete_nonexisting_idp(self):
+ """Delete nonexisting IdP.
+
+ Expect HTTP 404 for the GET IdP call.
+ """
+ idp_id = uuid.uuid4().hex
+ url = self.base_url(suffix=idp_id)
+ self.delete(url, expected_status=404)
+
+ def test_update_idp_mutable_attributes(self):
+ """Update IdP's mutable parameters."""
+ default_resp = self._create_default_idp()
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ url = self.base_url(suffix=idp_id)
+ self.assertIsNotNone(idp_id)
+
+ _enabled = not default_idp.get('enabled')
+ body = {'remote_id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': _enabled}
+
+ body = {'identity_provider': body}
+ resp = self.patch(url, body=body)
+ updated_idp = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ body = body['identity_provider']
+ for key in body.keys():
+ self.assertEqual(body[key], updated_idp.get(key))
+
+ resp = self.get(url)
+ updated_idp = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ for key in body.keys():
+ self.assertEqual(body[key], updated_idp.get(key))
+
+ def test_update_idp_immutable_attributes(self):
+ """Update IdP's immutable parameters.
+
+ Expect HTTP 403 code.
+
+ """
+ default_resp = self._create_default_idp()
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ self.assertIsNotNone(idp_id)
+
+ body = self._http_idp_input()
+ body['id'] = uuid.uuid4().hex
+ body['protocols'] = [uuid.uuid4().hex, uuid.uuid4().hex]
+
+ url = self.base_url(suffix=idp_id)
+ self.patch(url, body={'identity_provider': body}, expected_status=403)
+
+ def test_update_nonexistent_idp(self):
+ """Update nonexistent IdP
+
+ Expect HTTP 404 code.
+
+ """
+ idp_id = uuid.uuid4().hex
+ url = self.base_url(suffix=idp_id)
+ body = self._http_idp_input()
+ body['enabled'] = False
+ body = {'identity_provider': body}
+
+ self.patch(url, body=body, expected_status=404)
+
+ def test_assign_protocol_to_idp(self):
+ """Assign a protocol to existing IdP."""
+
+ self._assign_protocol_to_idp(expected_status=201)
+
+ def test_protocol_composite_pk(self):
+ """Test whether Keystone let's add two entities with identical
+ names, however attached to different IdPs.
+
+ 1. Add IdP and assign it protocol with predefined name
+ 2. Add another IdP and assign it a protocol with same name.
+
+ Expect HTTP 201 code
+
+ """
+ url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
+
+ kwargs = {'expected_status': 201}
+ self._assign_protocol_to_idp(proto='saml2',
+ url=url, **kwargs)
+
+ self._assign_protocol_to_idp(proto='saml2',
+ url=url, **kwargs)
+
+ def test_protocol_idp_pk_uniqueness(self):
+ """Test whether Keystone checks for unique idp/protocol values.
+
+        Add same protocol twice, expect Keystone to reject the latter call and
+ return HTTP 409 code.
+
+ """
+ url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
+
+ kwargs = {'expected_status': 201}
+ resp, idp_id, proto = self._assign_protocol_to_idp(proto='saml2',
+ url=url, **kwargs)
+ kwargs = {'expected_status': 409}
+ resp, idp_id, proto = self._assign_protocol_to_idp(idp_id=idp_id,
+ proto='saml2',
+ validate=False,
+ url=url, **kwargs)
+
+ def test_assign_protocol_to_nonexistent_idp(self):
+ """Assign protocol to IdP that doesn't exist.
+
+ Expect HTTP 404 code.
+
+ """
+
+ idp_id = uuid.uuid4().hex
+ kwargs = {'expected_status': 404}
+ self._assign_protocol_to_idp(proto='saml2',
+ idp_id=idp_id,
+ validate=False,
+ **kwargs)
+
+ def test_get_protocol(self):
+ """Create and later fetch protocol tied to IdP."""
+
+ resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ proto_id = self._fetch_attribute_from_response(resp, 'protocol')['id']
+ url = "%s/protocols/%s" % (idp_id, proto_id)
+ url = self.base_url(suffix=url)
+
+ resp = self.get(url)
+
+ reference = {'id': proto_id}
+ self.assertValidResponse(resp, 'protocol',
+ dummy_validator,
+ keys_to_check=reference.keys(),
+ ref=reference)
+
+ def test_list_protocols(self):
+ """Create set of protocols and later list them.
+
+ Compare input and output id sets.
+
+ """
+ resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ iterations = random.randint(0, 16)
+ protocol_ids = []
+ for _ in range(iterations):
+ resp, _, proto = self._assign_protocol_to_idp(idp_id=idp_id,
+ expected_status=201)
+ proto_id = self._fetch_attribute_from_response(resp, 'protocol')
+ proto_id = proto_id['id']
+ protocol_ids.append(proto_id)
+
+ url = "%s/protocols" % idp_id
+ url = self.base_url(suffix=url)
+ resp = self.get(url)
+ self.assertValidListResponse(resp, 'protocols',
+ dummy_validator,
+ keys_to_check=['id'])
+ entities = self._fetch_attribute_from_response(resp, 'protocols')
+ entities = set([entity['id'] for entity in entities])
+ protocols_intersection = entities.intersection(protocol_ids)
+ self.assertEqual(protocols_intersection, set(protocol_ids))
+
+ def test_update_protocols_attribute(self):
+ """Update protocol's attribute."""
+
+ resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ new_mapping_id = uuid.uuid4().hex
+
+ url = "%s/protocols/%s" % (idp_id, proto)
+ url = self.base_url(suffix=url)
+ body = {'mapping_id': new_mapping_id}
+ resp = self.patch(url, body={'protocol': body})
+ self.assertValidResponse(resp, 'protocol', dummy_validator,
+ keys_to_check=['id', 'mapping_id'],
+ ref={'id': proto,
+ 'mapping_id': new_mapping_id}
+ )
+
+ def test_delete_protocol(self):
+ """Delete protocol.
+
+ Expect HTTP 404 code for the GET call after the protocol is deleted.
+
+ """
+ url = self.base_url(suffix='/%(idp_id)s/'
+ 'protocols/%(protocol_id)s')
+ resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ url = url % {'idp_id': idp_id,
+ 'protocol_id': proto}
+ self.delete(url)
+ self.get(url, expected_status=404)
+
+
+class MappingCRUDTests(FederationTests):
+ """A class for testing CRUD operations for Mappings."""
+
+ MAPPING_URL = '/OS-FEDERATION/mappings/'
+
+ def assertValidMappingListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'mappings',
+ self.assertValidMapping,
+ keys_to_check=[],
+ *args,
+ **kwargs)
+
+ def assertValidMappingResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'mapping',
+ self.assertValidMapping,
+ keys_to_check=[],
+ *args,
+ **kwargs)
+
+ def assertValidMapping(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('id'))
+ self.assertIsNotNone(entity.get('rules'))
+ if ref:
+ self.assertEqual(jsonutils.loads(entity['rules']), ref['rules'])
+ return entity
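+
+    # Hedged note: entity['rules'] comes back JSON-serialized from the API,
+    # hence the jsonutils.loads() round-trip in the comparison above.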
+
+ def _create_default_mapping_entry(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ resp = self.put(url,
+ body={'mapping': mapping_fixtures.MAPPING_LARGE},
+ expected_status=201)
+ return resp
+
+ def _get_id_from_response(self, resp):
+ r = resp.result.get('mapping')
+ return r.get('id')
+
+ def test_mapping_create(self):
+ resp = self._create_default_mapping_entry()
+ self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_LARGE)
+
+ def test_mapping_list(self):
+ url = self.MAPPING_URL
+ self._create_default_mapping_entry()
+ resp = self.get(url)
+ entities = resp.result.get('mappings')
+ self.assertIsNotNone(entities)
+ self.assertResponseStatus(resp, 200)
+ self.assertValidListLinks(resp.result.get('links'))
+ self.assertEqual(1, len(entities))
+
+ def test_mapping_delete(self):
+ url = self.MAPPING_URL + '%(mapping_id)s'
+ resp = self._create_default_mapping_entry()
+ mapping_id = self._get_id_from_response(resp)
+ url = url % {'mapping_id': str(mapping_id)}
+ resp = self.delete(url)
+ self.assertResponseStatus(resp, 204)
+ self.get(url, expected_status=404)
+
+ def test_mapping_get(self):
+ url = self.MAPPING_URL + '%(mapping_id)s'
+ resp = self._create_default_mapping_entry()
+ mapping_id = self._get_id_from_response(resp)
+ url = url % {'mapping_id': mapping_id}
+ resp = self.get(url)
+ self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_LARGE)
+
+ def test_mapping_update(self):
+ url = self.MAPPING_URL + '%(mapping_id)s'
+ resp = self._create_default_mapping_entry()
+ mapping_id = self._get_id_from_response(resp)
+ url = url % {'mapping_id': mapping_id}
+ resp = self.patch(url,
+ body={'mapping': mapping_fixtures.MAPPING_SMALL})
+ self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL)
+ resp = self.get(url)
+ self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL)
+
+ def test_delete_mapping_dne(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.delete(url, expected_status=404)
+
+ def test_get_mapping_dne(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.get(url, expected_status=404)
+
+ def test_create_mapping_bad_requirements(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_BAD_REQ})
+
+ def test_create_mapping_no_rules(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_NO_RULES})
+
+ def test_create_mapping_no_remote_objects(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_NO_REMOTE})
+
+ def test_create_mapping_bad_value(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_BAD_VALUE})
+
+ def test_create_mapping_missing_local(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_MISSING_LOCAL})
+
+ def test_create_mapping_missing_type(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_MISSING_TYPE})
+
+ def test_create_mapping_wrong_type(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_WRONG_TYPE})
+
+ def test_create_mapping_extra_remote_properties_not_any_of(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF
+ self.put(url, expected_status=400, body={'mapping': mapping})
+
+ def test_create_mapping_extra_remote_properties_any_one_of(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF
+ self.put(url, expected_status=400, body={'mapping': mapping})
+
+ def test_create_mapping_extra_remote_properties_just_type(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE
+ self.put(url, expected_status=400, body={'mapping': mapping})
+
+ def test_create_mapping_empty_map(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': {}})
+
+ def test_create_mapping_extra_rules_properties(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ self.put(url, expected_status=400,
+ body={'mapping': mapping_fixtures.MAPPING_EXTRA_RULES_PROPS})
+
+ def test_create_mapping_with_blacklist_and_whitelist(self):
+ """Test for adding whitelist and blacklist in the rule
+
+ Server should respond with HTTP 400 error upon discovering both
+ ``whitelist`` and ``blacklist`` keywords in the same rule.
+
+ """
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_AND_BLACKLIST
+ self.put(url, expected_status=400, body={'mapping': mapping})
+
+
+class MappingRuleEngineTests(FederationTests):
+ """A class for testing the mapping rule engine."""
+
+ def assertValidMappedUserObject(self, mapped_properties,
+ user_type='ephemeral',
+ domain_id=None):
+ """Check whether mapped properties object has 'user' within.
+
+ According to today's rules, RuleProcessor does not have to issue user's
+ id or name. What's actually required is user's type and for ephemeral
+ users that would be service domain named 'Federated'.
+ """
+ self.assertIn('user', mapped_properties,
+ message='Missing user object in mapped properties')
+ user = mapped_properties['user']
+ self.assertIn('type', user)
+ self.assertEqual(user_type, user['type'])
+ self.assertIn('domain', user)
+ domain = user['domain']
+ domain_name_or_id = domain.get('id') or domain.get('name')
+ domain_ref = domain_id or federation.FEDERATED_DOMAIN_KEYWORD
+ self.assertEqual(domain_ref, domain_name_or_id)
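+        # A minimal object accepted by these checks looks roughly like
+        #     {'user': {'type': 'ephemeral',
+        #               'domain': {'id': 'Federated'}}}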
+
+ def test_rule_engine_any_one_of_and_direct_mapping(self):
+ """Should return user's name and group id EMPLOYEE_GROUP_ID.
+
+ The ADMIN_ASSERTION should successfully have a match in MAPPING_LARGE.
+        This will test the case where `any_one_of` is valid, and there is
+        a direct mapping for the user's name.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ assertion = mapping_fixtures.ADMIN_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ fn = assertion.get('FirstName')
+ ln = assertion.get('LastName')
+ full_name = '%s %s' % (fn, ln)
+ group_ids = values.get('group_ids')
+ user_name = values.get('user', {}).get('name')
+
+ self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
+ self.assertEqual(full_name, user_name)
+
+ def test_rule_engine_no_regex_match(self):
+ """Should deny authorization, the email of the tester won't match.
+
+ This will not match since the email in the assertion will fail
+ the regex test. It is set to match any @example.com address.
+ But the incoming value is set to eviltester@example.org.
+        RuleProcessor should return an empty list of group_ids.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ assertion = mapping_fixtures.BAD_TESTER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+
+ self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNone(mapped_properties['user'].get('name'))
+ self.assertListEqual(list(), mapped_properties['group_ids'])
+
+ def test_rule_engine_regex_many_groups(self):
+ """Should return group CONTRACTOR_GROUP_ID.
+
+ The TESTER_ASSERTION should successfully have a match in
+ MAPPING_TESTER_REGEX. This will test the case where many groups
+ are in the assertion, and a regex value is used to try and find
+ a match.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_TESTER_REGEX
+ assertion = mapping_fixtures.TESTER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
+
+ def test_rule_engine_any_one_of_many_rules(self):
+ """Should return group CONTRACTOR_GROUP_ID.
+
+ The CONTRACTOR_ASSERTION should successfully have a match in
+ MAPPING_SMALL. This will test the case where many rules
+ must be matched, including an `any_one_of`, and a direct
+ mapping.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_SMALL
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.CONTRACTOR_GROUP_ID, group_ids)
+
+ def test_rule_engine_not_any_of_and_direct_mapping(self):
+ """Should return user's name and email.
+
+ The CUSTOMER_ASSERTION should successfully have a match in
+ MAPPING_LARGE. This will test the case where a requirement
+ has `not_any_of`, and direct mapping to a username, no group.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ assertion = mapping_fixtures.CUSTOMER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+        self.assertEqual([], group_ids)
+
+ def test_rule_engine_not_any_of_many_rules(self):
+ """Should return group EMPLOYEE_GROUP_ID.
+
+ The EMPLOYEE_ASSERTION should successfully have a match in
+ MAPPING_SMALL. This will test the case where many remote
+ rules must be matched, including a `not_any_of`.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_SMALL
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
+
+ def test_rule_engine_not_any_of_regex_verify_pass(self):
+ """Should return group DEVELOPER_GROUP_ID.
+
+ The DEVELOPER_ASSERTION should successfully have a match in
+ MAPPING_DEVELOPER_REGEX. This will test the case where many
+ remote rules must be matched, including a `not_any_of`, with
+ regex set to True.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
+ assertion = mapping_fixtures.DEVELOPER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
+
+ def test_rule_engine_not_any_of_regex_verify_fail(self):
+ """Should deny authorization.
+
+ The email in the assertion will fail the regex test.
+ It is set to reject any @example.org address, but the
+ incoming value is set to evildeveloper@example.org.
+        RuleProcessor should return an empty list of group_ids.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
+ assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+
+ self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNone(mapped_properties['user'].get('name'))
+ self.assertListEqual(list(), mapped_properties['group_ids'])
+
+ def _rule_engine_regex_match_and_many_groups(self, assertion):
+ """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
+
+ A helper function injecting assertion passed as an argument.
+ Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertValidMappedUserObject(values)
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
+ self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
+
+ def test_rule_engine_regex_match_and_many_groups(self):
+ """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
+
+ The TESTER_ASSERTION should successfully have a match in
+ MAPPING_LARGE. This will test a successful regex match
+ for an `any_one_of` evaluation type, and will have many
+ groups returned.
+
+ """
+ self._rule_engine_regex_match_and_many_groups(
+ mapping_fixtures.TESTER_ASSERTION)
+
+ def test_rule_engine_discards_nonstring_objects(self):
+ """Check whether RuleProcessor discards non string objects.
+
+ Despite the fact that assertion is malformed and contains
+ non string objects, RuleProcessor should correctly discard them and
+ successfully have a match in MAPPING_LARGE.
+
+ """
+ self._rule_engine_regex_match_and_many_groups(
+ mapping_fixtures.MALFORMED_TESTER_ASSERTION)
+
+ def test_rule_engine_fails_after_discarding_nonstring(self):
+ """Check whether RuleProcessor discards non string objects.
+
+ Expect RuleProcessor to discard non string object, which
+ is required for a correct rule match. RuleProcessor will result with
+ empty list of groups.
+
+ """
+ mapping = mapping_fixtures.MAPPING_SMALL
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNone(mapped_properties['user'].get('name'))
+ self.assertListEqual(list(), mapped_properties['group_ids'])
+
+ def test_rule_engine_returns_group_names(self):
+ """Check whether RuleProcessor returns group names with their domains.
+
+        RuleProcessor should return a 'group_names' entry containing a list
+        of dictionaries with two entries, 'name' and 'domain', identifying a
+        group by its name and domain.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUP_NAMES
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ reference = {
+ mapping_fixtures.DEVELOPER_GROUP_NAME:
+ {
+ "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
+ "domain": {
+ "name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME
+ }
+ },
+ mapping_fixtures.TESTER_GROUP_NAME:
+ {
+ "name": mapping_fixtures.TESTER_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+
+ def test_rule_engine_whitelist_and_direct_groups_mapping(self):
+ """Should return user's groups Developer and Contractor.
+
+ The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
+ in MAPPING_GROUPS_WHITELIST. It will test the case where 'whitelist'
+ correctly filters out Manager and only allows Developer and Contractor.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+
+ reference = {
+ mapping_fixtures.DEVELOPER_GROUP_NAME:
+ {
+ "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ },
+ mapping_fixtures.CONTRACTOR_GROUP_NAME:
+ {
+ "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual([], mapped_properties['group_ids'])
+
+ def test_rule_engine_blacklist_and_direct_groups_mapping(self):
+ """Should return user's group Developer.
+
+ The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
+ in MAPPING_GROUPS_BLACKLIST. It will test the case where 'blacklist'
+ correctly filters out Manager and Developer and only allows Contractor.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+
+ reference = {
+ mapping_fixtures.CONTRACTOR_GROUP_NAME:
+ {
+ "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual([], mapped_properties['group_ids'])
+
+ def test_rule_engine_blacklist_and_direct_groups_mapping_multiples(self):
+ """Tests matching multiple values before the blacklist.
+
+ Verifies that the local indexes are correct when matching multiple
+ remote values for a field when the field occurs before the blacklist
+ entry in the remote rules.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+
+ reference = {
+ mapping_fixtures.CONTRACTOR_GROUP_NAME:
+ {
+ "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual([], mapped_properties['group_ids'])
+
+ def test_rule_engine_whitelist_direct_group_mapping_missing_domain(self):
+ """Test if the local rule is rejected upon missing domain value
+
+ This is a variation with a ``whitelist`` filter.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ self.assertRaises(exception.ValidationError, rp.process, assertion)
+
+ def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self):
+ """Test if the local rule is rejected upon missing domain value
+
+ This is a variation with a ``blacklist`` filter.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ self.assertRaises(exception.ValidationError, rp.process, assertion)
+
+ def test_rule_engine_no_groups_allowed(self):
+ """Should return user mapped to no groups.
+
+ The EMPLOYEE_ASSERTION should successfully have a match
+ in MAPPING_GROUPS_WHITELIST, but 'whitelist' should filter out
+ the group values from the assertion and thus map to no groups.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertListEqual(mapped_properties['group_names'], [])
+ self.assertListEqual(mapped_properties['group_ids'], [])
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+
+ def test_mapping_federated_domain_specified(self):
+ """Test mapping engine when domain 'ephemeral' is explicitely set.
+
+ For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion
+ EMPLOYEE_ASSERTION
+
+ """
+ mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+
+ def test_create_user_object_with_bad_mapping(self):
+ """Test if user object is created even with bad mapping.
+
+ User objects will be created by mapping engine always as long as there
+ is corresponding local rule. This test shows, that even with assertion
+ where no group names nor ids are matched, but there is 'blind' rule for
+ mapping user, such object will be created.
+
+ In this test MAPPING_EHPEMERAL_USER expects UserName set to jsmith
+ whereas value from assertion is 'tbo'.
+
+ """
+ mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+
+ self.assertNotIn('id', mapped_properties['user'])
+ self.assertNotIn('name', mapped_properties['user'])
+
+ def test_set_ephemeral_domain_to_ephemeral_users(self):
+ """Test auto assigning service domain to ephemeral users.
+
+ Test that ephemeral users will always become members of federated
+ service domain. The check depends on ``type`` value which must be set
+ to ``ephemeral`` in case of ephemeral user.
+
+ """
+ mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+
+ def test_local_user_local_domain(self):
+ """Test that local users can have non-service domains assigned."""
+ mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(
+ mapped_properties, user_type='local',
+ domain_id=mapping_fixtures.LOCAL_DOMAIN)
+
+ def test_user_identifications_name(self):
+ """Test varius mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has property type set ('ephemeral')
+ - Check if user's name is properly mapped from the assertion
+ - Check if user's id is properly set and equal to name, as it was not
+ explicitely specified in the mapping.
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username({}, mapped_properties)
+ self.assertEqual('jsmith', mapped_properties['user']['id'])
+ self.assertEqual('jsmith', mapped_properties['user']['name'])
+
+ def test_user_identifications_name_and_federated_domain(self):
+ """Test varius mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has propert type set ('ephemeral')
+ - Check if user's name is properly mapped from the assertion
+ - Check if user's id is properly set and equal to name, as it was not
+ explicitely specified in the mapping.
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username({}, mapped_properties)
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual('tbo', mapped_properties['user']['id'])
+
+ def test_user_identification_id(self):
+ """Test varius mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has propert type set ('ephemeral')
+ - Check if user's id is properly mapped from the assertion
+ - Check if user's name is properly set and equal to id, as it was not
+ explicitely specified in the mapping.
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.ADMIN_ASSERTION
+ mapped_properties = rp.process(assertion)
+ context = {'environment': {}}
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username(context, mapped_properties)
+ self.assertEqual('bob', mapped_properties['user']['name'])
+ self.assertEqual('bob', mapped_properties['user']['id'])
+
+ def test_user_identification_id_and_name(self):
+ """Test varius mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has proper type set ('ephemeral')
+ - Check if user's name is properly mapped from the assertion
+ - Check if user's id is properly set and and equal to value hardcoded
+ in the mapping
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CUSTOMER_ASSERTION
+ mapped_properties = rp.process(assertion)
+ context = {'environment': {}}
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username(context, mapped_properties)
+ self.assertEqual('bwilliams', mapped_properties['user']['name'])
+ self.assertEqual('abc123', mapped_properties['user']['id'])
+
+
+class FederatedTokenTests(FederationTests, FederatedSetupMixin):
+
+ def auth_plugin_config_override(self):
+ methods = ['saml2']
+ method_classes = {'saml2': 'keystone.auth.plugins.saml2.Saml2'}
+ super(FederatedTokenTests, self).auth_plugin_config_override(
+ methods, **method_classes)
+
+ def setUp(self):
+ super(FederatedTokenTests, self).setUp()
+ self._notifications = []
+
+ def fake_saml_notify(action, context, user_id, group_ids,
+ identity_provider, protocol, token_id, outcome):
+ note = {
+ 'action': action,
+ 'user_id': user_id,
+ 'identity_provider': identity_provider,
+ 'protocol': protocol,
+ 'send_notification_called': True}
+ self._notifications.append(note)
+
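+        # Replace the real SAML audit notifier with the recorder above so
+        # each test can inspect the last emitted notification via
+        # _assert_last_notify().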
+ self.useFixture(mockpatch.PatchObject(
+ notifications,
+ 'send_saml_audit_notification',
+ fake_saml_notify))
+
+ def _assert_last_notify(self, action, identity_provider, protocol,
+ user_id=None):
+ self.assertTrue(self._notifications)
+ note = self._notifications[-1]
+ if user_id:
+ self.assertEqual(note['user_id'], user_id)
+ self.assertEqual(note['action'], action)
+ self.assertEqual(note['identity_provider'], identity_provider)
+ self.assertEqual(note['protocol'], protocol)
+ self.assertTrue(note['send_notification_called'])
+
+ def load_fixtures(self, fixtures):
+ super(FederationTests, self).load_fixtures(fixtures)
+ self.load_federation_sample_data()
+
+ def test_issue_unscoped_token_notify(self):
+ self._issue_unscoped_token()
+ self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL)
+
+ def test_issue_unscoped_token(self):
+ r = self._issue_unscoped_token()
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_issue_unscoped_token_disabled_idp(self):
+ """Checks if authentication works with disabled identity providers.
+
+ Test plan:
+ 1) Disable default IdP
+ 2) Try issuing unscoped token for that IdP
+ 3) Expect server to forbid authentication
+
+ """
+ enabled_false = {'enabled': False}
+ self.federation_api.update_idp(self.IDP, enabled_false)
+ self.assertRaises(exception.Forbidden,
+ self._issue_unscoped_token)
+
+ def test_issue_unscoped_token_group_names_in_mapping(self):
+ r = self._issue_unscoped_token(assertion='ANOTHER_CUSTOMER_ASSERTION')
+ ref_groups = set([self.group_customers['id'], self.group_admins['id']])
+ token_resp = r.json_body
+ token_groups = token_resp['token']['user']['OS-FEDERATION']['groups']
+ token_groups = set([group['id'] for group in token_groups])
+ self.assertEqual(ref_groups, token_groups)
+
+ def test_issue_unscoped_tokens_nonexisting_group(self):
+ self.assertRaises(exception.MissingGroups,
+ self._issue_unscoped_token,
+ assertion='ANOTHER_TESTER_ASSERTION')
+
+ def test_issue_unscoped_token_with_remote_no_attribute(self):
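+        # Unlike test_issue_unscoped_token_with_remote below, the
+        # remote_id_attribute option is not configured here, so the remote id
+        # supplied in the environment is not checked against the IdP.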
+ r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
+ environment={
+ self.REMOTE_ID_ATTR: self.REMOTE_ID
+ })
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_issue_unscoped_token_with_remote(self):
+ self.config_fixture.config(group='federation',
+ remote_id_attribute=self.REMOTE_ID_ATTR)
+ r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
+ environment={
+ self.REMOTE_ID_ATTR: self.REMOTE_ID
+ })
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_issue_unscoped_token_with_remote_different(self):
+ self.config_fixture.config(group='federation',
+ remote_id_attribute=self.REMOTE_ID_ATTR)
+ self.assertRaises(exception.Forbidden,
+ self._issue_unscoped_token,
+ idp=self.IDP_WITH_REMOTE,
+ environment={
+ self.REMOTE_ID_ATTR: uuid.uuid4().hex
+ })
+
+ def test_issue_unscoped_token_with_remote_unavailable(self):
+ self.config_fixture.config(group='federation',
+ remote_id_attribute=self.REMOTE_ID_ATTR)
+ self.assertRaises(exception.ValidationError,
+ self._issue_unscoped_token,
+ idp=self.IDP_WITH_REMOTE,
+ environment={
+ uuid.uuid4().hex: uuid.uuid4().hex
+ })
+
+ def test_issue_unscoped_token_with_remote_user_as_empty_string(self):
+ # make sure that REMOTE_USER set as the empty string won't interfere
+ r = self._issue_unscoped_token(environment={'REMOTE_USER': ''})
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_issue_unscoped_token_no_groups(self):
+ self.assertRaises(exception.Unauthorized,
+ self._issue_unscoped_token,
+ assertion='BAD_TESTER_ASSERTION')
+
+ def test_issue_unscoped_token_malformed_environment(self):
+ """Test whether non string objects are filtered out.
+
+ Put non string objects into the environment, inject
+ correct assertion and try to get an unscoped token.
+ Expect server not to fail on using split() method on
+ non string objects and return token id in the HTTP header.
+
+ """
+ api = auth_controllers.Auth()
+ context = {
+ 'environment': {
+ 'malformed_object': object(),
+ 'another_bad_idea': tuple(xrange(10)),
+ 'yet_another_bad_param': dict(zip(uuid.uuid4().hex,
+ range(32)))
+ }
+ }
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
+ r = api.authenticate_for_token(context, self.UNSCOPED_V3_SAML2_REQ)
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_scope_to_project_once_notify(self):
+ r = self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
+ user_id = r.json['token']['user']['id']
+ self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL, user_id)
+
+ def test_scope_to_project_once(self):
+ r = self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
+ token_resp = r.result['token']
+ project_id = token_resp['project']['id']
+ self.assertEqual(project_id, self.proj_employees['id'])
+ self._check_scoped_token_attributes(token_resp)
+ roles_ref = [self.role_employee]
+ projects_ref = self.proj_employees
+ self._check_projects_and_roles(token_resp, roles_ref, projects_ref)
+
+ def test_scope_token_with_idp_disabled(self):
+ """Scope token issued by disabled IdP.
+
+ Try scoping the token issued by an IdP which is disabled now. Expect
+ server to refuse scoping operation.
+
+        This test confirms correct behaviour when the IdP was enabled, an
+        unscoped token was issued, and the IdP was then disabled before the
+        user tried to scope the token. Here we assume the unscoped token was
+        already issued and start from the moment where the IdP is being
+        disabled and the unscoped token is being used.
+
+ Test plan:
+ 1) Disable IdP
+ 2) Try scoping unscoped token
+
+ """
+ enabled_false = {'enabled': False}
+ self.federation_api.update_idp(self.IDP, enabled_false)
+ self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER,
+ expected_status=403)
+
+ def test_scope_to_bad_project(self):
+ """Scope unscoped token with a project we don't have access to."""
+
+ self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER,
+ expected_status=401)
+
+ def test_scope_to_project_multiple_times(self):
+ """Try to scope the unscoped token multiple times.
+
+ The new tokens should be scoped to:
+
+ * Customers' project
+ * Employees' project
+
+ """
+
+ bodies = (self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN,
+ self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN)
+ project_ids = (self.proj_employees['id'],
+ self.proj_customers['id'])
+ for body, project_id_ref in zip(bodies, project_ids):
+ r = self.v3_authenticate_token(body)
+ token_resp = r.result['token']
+ project_id = token_resp['project']['id']
+ self.assertEqual(project_id, project_id_ref)
+ self._check_scoped_token_attributes(token_resp)
+
+ def test_scope_to_project_with_only_inherited_roles(self):
+ """Try to scope token whose only roles are inherited."""
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ r = self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER)
+ token_resp = r.result['token']
+ project_id = token_resp['project']['id']
+ self.assertEqual(project_id, self.project_inherited['id'])
+ self._check_scoped_token_attributes(token_resp)
+ roles_ref = [self.role_customer]
+ projects_ref = self.project_inherited
+ self._check_projects_and_roles(token_resp, roles_ref, projects_ref)
+
+ def test_scope_token_from_nonexistent_unscoped_token(self):
+ """Try to scope token from non-existent unscoped token."""
+ self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN,
+ expected_status=404)
+
+ def test_issue_token_from_rules_without_user(self):
+ api = auth_controllers.Auth()
+ context = {'environment': {}}
+ self._inject_assertion(context, 'BAD_TESTER_ASSERTION')
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate_for_token,
+ context, self.UNSCOPED_V3_SAML2_REQ)
+
+ def test_issue_token_with_nonexistent_group(self):
+ """Inject assertion that matches rule issuing bad group id.
+
+ Expect server to find out that some groups are missing in the
+ backend and raise exception.MappedGroupNotFound exception.
+
+ """
+ self.assertRaises(exception.MappedGroupNotFound,
+ self._issue_unscoped_token,
+ assertion='CONTRACTOR_ASSERTION')
+
+ def test_scope_to_domain_once(self):
+ r = self.v3_authenticate_token(self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER)
+ token_resp = r.result['token']
+ domain_id = token_resp['domain']['id']
+ self.assertEqual(self.domainA['id'], domain_id)
+ self._check_scoped_token_attributes(token_resp)
+
+ def test_scope_to_domain_multiple_tokens(self):
+ """Issue multiple tokens scoping to different domains.
+
+ The new tokens should be scoped to:
+
+ * domainA
+ * domainB
+ * domainC
+
+ """
+ bodies = (self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN,
+ self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN,
+ self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN)
+ domain_ids = (self.domainA['id'],
+ self.domainB['id'],
+ self.domainC['id'])
+
+ for body, domain_id_ref in zip(bodies, domain_ids):
+ r = self.v3_authenticate_token(body)
+ token_resp = r.result['token']
+ domain_id = token_resp['domain']['id']
+ self.assertEqual(domain_id_ref, domain_id)
+ self._check_scoped_token_attributes(token_resp)
+
+ def test_scope_to_domain_with_only_inherited_roles_fails(self):
+ """Try to scope to a domain that has no direct roles."""
+ self.v3_authenticate_token(
+ self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER,
+ expected_status=401)
+
+ def test_list_projects(self):
+ urls = ('/OS-FEDERATION/projects', '/auth/projects')
+
+        tokens = (self.tokens['CUSTOMER_ASSERTION'],
+ self.tokens['EMPLOYEE_ASSERTION'],
+ self.tokens['ADMIN_ASSERTION'])
+
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ projects_refs = (set([self.proj_customers['id'],
+ self.project_inherited['id']]),
+ set([self.proj_employees['id'],
+ self.project_all['id']]),
+ set([self.proj_employees['id'],
+ self.project_all['id'],
+ self.proj_customers['id'],
+ self.project_inherited['id']]))
+
+        for token, projects_ref in zip(tokens, projects_refs):
+ for url in urls:
+ r = self.get(url, token=token)
+ projects_resp = r.result['projects']
+ projects = set(p['id'] for p in projects_resp)
+ self.assertEqual(projects_ref, projects,
+ 'match failed for url %s' % url)
+
+ def test_list_domains(self):
+ urls = ('/OS-FEDERATION/domains', '/auth/domains')
+
+ tokens = (self.tokens['CUSTOMER_ASSERTION'],
+ self.tokens['EMPLOYEE_ASSERTION'],
+ self.tokens['ADMIN_ASSERTION'])
+
+ # NOTE(henry-nash): domain D does not appear in the expected results
+ # since it only had inherited roles (which only apply to projects
+ # within the domain)
+
+ domain_refs = (set([self.domainA['id']]),
+ set([self.domainA['id'],
+ self.domainB['id']]),
+ set([self.domainA['id'],
+ self.domainB['id'],
+ self.domainC['id']]))
+
+ for token, domains_ref in zip(tokens, domain_refs):
+ for url in urls:
+ r = self.get(url, token=token)
+ domains_resp = r.result['domains']
+ domains = set(p['id'] for p in domains_resp)
+ self.assertEqual(domains_ref, domains,
+ 'match failed for url %s' % url)
+
+ def test_full_workflow(self):
+ """Test 'standard' workflow for granting access tokens.
+
+ * Issue unscoped token
+ * List available projects based on groups
+ * Scope token to one of available projects
+
+ """
+
+ r = self._issue_unscoped_token()
+ employee_unscoped_token_id = r.headers.get('X-Subject-Token')
+ r = self.get('/OS-FEDERATION/projects',
+ token=employee_unscoped_token_id)
+ projects = r.result['projects']
+        # pick a random project index (randint bounds are inclusive)
+        random_project = random.randint(0, len(projects) - 1)
+ project = projects[random_project]
+
+ v3_scope_request = self._scope_request(employee_unscoped_token_id,
+ 'project', project['id'])
+
+ r = self.v3_authenticate_token(v3_scope_request)
+ token_resp = r.result['token']
+ project_id = token_resp['project']['id']
+ self.assertEqual(project['id'], project_id)
+ self._check_scoped_token_attributes(token_resp)
+
+ def test_workflow_with_groups_deletion(self):
+ """Test full workflow with groups deletion before token scoping.
+
+ The test scenario is as follows:
+ - Create group ``group``
+ - Create and assign roles to ``group`` and ``project_all``
+ - Patch mapping rules for existing IdP so it issues group id
+ - Issue unscoped token with ``group``'s id
+ - Delete group ``group``
+ - Scope token to ``project_all``
+ - Expect HTTP 500 response
+
+ """
+ # create group and role
+ group = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ group = self.identity_api.create_group(group)
+ role = self.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
+ # assign role to group and project_admins
+ self.assignment_api.create_grant(role['id'],
+ group_id=group['id'],
+ project_id=self.project_all['id'])
+
+ rules = {
+ 'rules': [
+ {
+ 'local': [
+ {
+ 'group': {
+ 'id': group['id']
+ }
+ },
+ {
+ 'user': {
+ 'name': '{0}'
+ }
+ }
+ ],
+ 'remote': [
+ {
+ 'type': 'UserName'
+ },
+ {
+ 'type': 'LastName',
+ 'any_one_of': [
+ 'Account'
+ ]
+ }
+ ]
+ }
+ ]
+ }
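+        # The mapping above assigns the freshly created group to any
+        # assertion whose LastName attribute equals 'Account' and takes the
+        # user name from the UserName attribute ('{0}' refers to the first
+        # remote match).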
+
+ self.federation_api.update_mapping(self.mapping['id'], rules)
+
+ r = self._issue_unscoped_token(assertion='TESTER_ASSERTION')
+ token_id = r.headers.get('X-Subject-Token')
+
+ # delete group
+ self.identity_api.delete_group(group['id'])
+
+ # scope token to project_all, expect HTTP 500
+ scoped_token = self._scope_request(
+ token_id, 'project',
+ self.project_all['id'])
+
+ self.v3_authenticate_token(scoped_token, expected_status=500)
+
+ def test_lists_with_missing_group_in_backend(self):
+ """Test a mapping that points to a group that does not exist
+
+ For explicit mappings, we expect the group to exist in the backend,
+ but for lists, specifically blacklists, a missing group is expected
+ as many groups will be specified by the IdP that are not Keystone
+ groups.
+
+ The test scenario is as follows:
+ - Create group ``EXISTS``
+ - Set mapping rules for existing IdP with a blacklist
+ that passes through as REMOTE_USER_GROUPS
+        - Issue unscoped token with only the ``EXISTS`` group id in it
+
+ """
+ domain_id = self.domainA['id']
+ domain_name = self.domainA['name']
+ group = self.new_group_ref(domain_id=domain_id)
+ group['name'] = 'EXISTS'
+ group = self.identity_api.create_group(group)
+ rules = {
+ 'rules': [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "id": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {"name": domain_name}
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER_GROUPS",
+ "blacklist": ["noblacklist"]
+ }
+ ]
+ }
+ ]
+ }
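+        # In the second rule, '{0}' expands to the REMOTE_USER_GROUPS values
+        # that survive the blacklist; every group name except 'noblacklist'
+        # passes through and is looked up by name in the given domain.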
+ self.federation_api.update_mapping(self.mapping['id'], rules)
+
+ r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
+ assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
+ self.assertEqual(1, len(assigned_group_ids))
+ self.assertEqual(group['id'], assigned_group_ids[0]['id'])
+
+ def test_assertion_prefix_parameter(self):
+ """Test parameters filtering based on the prefix.
+
+        With ``assertion_prefix`` set to a fixed, non-default value,
+ issue an unscoped token from assertion EMPLOYEE_ASSERTION_PREFIXED.
+ Expect server to return unscoped token.
+
+ """
+ self.config_fixture.config(group='federation',
+ assertion_prefix=self.ASSERTION_PREFIX)
+ r = self._issue_unscoped_token(assertion='EMPLOYEE_ASSERTION_PREFIXED')
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_assertion_prefix_parameter_expect_fail(self):
+ """Test parameters filtering based on the prefix.
+
+        With ``assertion_prefix`` left at its default value (an empty
+        string), issue an unscoped token from assertion EMPLOYEE_ASSERTION.
+        Next, configure ``assertion_prefix`` to the value ``UserName``.
+        Try issuing an unscoped token with EMPLOYEE_ASSERTION.
+        Expect the server to raise an exception.Unauthorized exception.
+
+ """
+ r = self._issue_unscoped_token()
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+ self.config_fixture.config(group='federation',
+ assertion_prefix='UserName')
+
+ self.assertRaises(exception.Unauthorized,
+ self._issue_unscoped_token)
+
+ def test_v2_auth_with_federation_token_fails(self):
+ """Test that using a federation token with v2 auth fails.
+
+ If an admin sets up a federated Keystone environment, and a user
+ incorrectly configures a service (like Nova) to only use v2 auth, the
+ returned message should be informative.
+
+ """
+ r = self._issue_unscoped_token()
+ token_id = r.headers.get('X-Subject-Token')
+ self.assertRaises(exception.Unauthorized,
+ self.token_provider_api.validate_v2_token,
+ token_id=token_id)
+
+ def test_unscoped_token_has_user_domain(self):
+ r = self._issue_unscoped_token()
+ self._check_domains_are_valid(r.json_body['token'])
+
+ def test_scoped_token_has_user_domain(self):
+ r = self.v3_authenticate_token(
+ self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
+ self._check_domains_are_valid(r.result['token'])
+
+ def test_issue_unscoped_token_for_local_user(self):
+ r = self._issue_unscoped_token(assertion='LOCAL_USER_ASSERTION')
+ token_resp = r.json_body['token']
+ self.assertListEqual(['saml2'], token_resp['methods'])
+ self.assertEqual(self.user['id'], token_resp['user']['id'])
+ self.assertEqual(self.user['name'], token_resp['user']['name'])
+ self.assertEqual(self.domain['id'], token_resp['user']['domain']['id'])
+ # Make sure the token is not scoped
+ self.assertNotIn('project', token_resp)
+ self.assertNotIn('domain', token_resp)
+
+ def test_issue_token_for_local_user_user_not_found(self):
+ self.assertRaises(exception.Unauthorized,
+ self._issue_unscoped_token,
+ assertion='ANOTHER_LOCAL_USER_ASSERTION')
+
+
+class FernetFederatedTokenTests(FederationTests, FederatedSetupMixin):
+ AUTH_METHOD = 'token'
+
+ def load_fixtures(self, fixtures):
+ super(FernetFederatedTokenTests, self).load_fixtures(fixtures)
+ self.load_federation_sample_data()
+
+ def auth_plugin_config_override(self):
+ methods = ['saml2', 'token', 'password']
+ method_classes = dict(
+ password='keystone.auth.plugins.password.Password',
+ token='keystone.auth.plugins.token.Token',
+ saml2='keystone.auth.plugins.saml2.Saml2')
+ super(FernetFederatedTokenTests,
+ self).auth_plugin_config_override(methods, **method_classes)
+ self.config_fixture.config(
+ group='token',
+ provider='keystone.token.providers.fernet.Provider')
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
+ def test_federated_unscoped_token(self):
+ resp = self._issue_unscoped_token()
+ self.assertEqual(186, len(resp.headers['X-Subject-Token']))
+
+ def test_federated_unscoped_token_with_multiple_groups(self):
+ assertion = 'ANOTHER_CUSTOMER_ASSERTION'
+ resp = self._issue_unscoped_token(assertion=assertion)
+ self.assertEqual(204, len(resp.headers['X-Subject-Token']))
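+        # The longer token reflects the additional group data carried in the
+        # Fernet payload; compare with the 186-character token above.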
+
+ def test_validate_federated_unscoped_token(self):
+ resp = self._issue_unscoped_token()
+ unscoped_token = resp.headers.get('X-Subject-Token')
+ # assert that the token we received is valid
+ self.get('/auth/tokens/', headers={'X-Subject-Token': unscoped_token})
+
+ def test_fernet_full_workflow(self):
+ """Test 'standard' workflow for granting Fernet access tokens.
+
+ * Issue unscoped token
+ * List available projects based on groups
+ * Scope token to one of available projects
+
+ """
+ resp = self._issue_unscoped_token()
+ unscoped_token = resp.headers.get('X-Subject-Token')
+ resp = self.get('/OS-FEDERATION/projects',
+ token=unscoped_token)
+ projects = resp.result['projects']
+        # pick a random project index (randint bounds are inclusive)
+        random_project = random.randint(0, len(projects) - 1)
+ project = projects[random_project]
+
+ v3_scope_request = self._scope_request(unscoped_token,
+ 'project', project['id'])
+
+ resp = self.v3_authenticate_token(v3_scope_request)
+ token_resp = resp.result['token']
+ project_id = token_resp['project']['id']
+ self.assertEqual(project['id'], project_id)
+ self._check_scoped_token_attributes(token_resp)
+
+
+class FederatedTokenTestsMethodToken(FederatedTokenTests):
+ """Test federation operation with unified scoping auth method.
+
+ Test all the operations with auth method set to ``token`` as a new, unified
+ way for scoping all the tokens.
+
+ """
+ AUTH_METHOD = 'token'
+
+ def auth_plugin_config_override(self):
+ methods = ['saml2', 'token']
+ method_classes = dict(
+ token='keystone.auth.plugins.token.Token',
+ saml2='keystone.auth.plugins.saml2.Saml2')
+ super(FederatedTokenTests,
+ self).auth_plugin_config_override(methods, **method_classes)
+
+
+class JsonHomeTests(FederationTests, test_v3.JsonHomeTestMixin):
+ JSON_HOME_DATA = {
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-FEDERATION/'
+ '1.0/rel/identity_provider': {
+ 'href-template': '/OS-FEDERATION/identity_providers/{idp_id}',
+ 'href-vars': {
+ 'idp_id': 'http://docs.openstack.org/api/openstack-identity/3/'
+ 'ext/OS-FEDERATION/1.0/param/idp_id'
+ },
+ },
+ }
+
+
+def _is_xmlsec1_installed():
+ p = subprocess.Popen(
+ ['which', 'xmlsec1'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+
+    # `which` exits with 0 when xmlsec1 is found; invert the exit code
+    # so that success maps to True
+    return not bool(p.wait())
+
+
+def _load_xml(filename):
+ with open(os.path.join(XMLDIR, filename), 'r') as xml:
+ return xml.read()
+
+
+class SAMLGenerationTests(FederationTests):
+
+ SP_AUTH_URL = ('http://beta.com:5000/v3/OS-FEDERATION/identity_providers'
+ '/BETA/protocols/saml2/auth')
+ ISSUER = 'https://acme.com/FIM/sps/openstack/saml20'
+ RECIPIENT = 'http://beta.com/Shibboleth.sso/SAML2/POST'
+ SUBJECT = 'test_user'
+ ROLES = ['admin', 'member']
+ PROJECT = 'development'
+ SAML_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2'
+ ASSERTION_VERSION = "2.0"
+    SERVICE_PROVIDER_ID = 'ACME'
+
+ def sp_ref(self):
+ ref = {
+ 'auth_url': self.SP_AUTH_URL,
+ 'enabled': True,
+ 'description': uuid.uuid4().hex,
+ 'sp_url': self.RECIPIENT,
+
+ }
+ return ref
+
+ def setUp(self):
+ super(SAMLGenerationTests, self).setUp()
+ self.signed_assertion = saml2.create_class_from_xml_string(
+ saml.Assertion, _load_xml('signed_saml2_assertion.xml'))
+ self.sp = self.sp_ref()
+        self.federation_api.create_sp(self.SERVICE_PROVIDER_ID, self.sp)
+
+ def test_samlize_token_values(self):
+ """Test the SAML generator produces a SAML object.
+
+ Test the SAML generator directly by passing known arguments, the result
+ should be a SAML object that consistently includes attributes based on
+ the known arguments that were passed in.
+
+ """
+ with mock.patch.object(keystone_idp, '_sign_assertion',
+ return_value=self.signed_assertion):
+ generator = keystone_idp.SAMLGenerator()
+ response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
+ self.SUBJECT, self.ROLES,
+ self.PROJECT)
+
+ assertion = response.assertion
+ self.assertIsNotNone(assertion)
+ self.assertIsInstance(assertion, saml.Assertion)
+ issuer = response.issuer
+ self.assertEqual(self.RECIPIENT, response.destination)
+ self.assertEqual(self.ISSUER, issuer.text)
+
+ user_attribute = assertion.attribute_statement[0].attribute[0]
+ self.assertEqual(self.SUBJECT, user_attribute.attribute_value[0].text)
+
+ role_attribute = assertion.attribute_statement[0].attribute[1]
+ for attribute_value in role_attribute.attribute_value:
+ self.assertIn(attribute_value.text, self.ROLES)
+
+ project_attribute = assertion.attribute_statement[0].attribute[2]
+ self.assertEqual(self.PROJECT,
+ project_attribute.attribute_value[0].text)
+
+ def test_verify_assertion_object(self):
+ """Test that the Assertion object is built properly.
+
+        The Assertion doesn't need to be signed in this test, so the
+        _sign_assertion method is patched and doesn't alter the assertion.
+
+ """
+ with mock.patch.object(keystone_idp, '_sign_assertion',
+ side_effect=lambda x: x):
+ generator = keystone_idp.SAMLGenerator()
+ response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
+ self.SUBJECT, self.ROLES,
+ self.PROJECT)
+ assertion = response.assertion
+ self.assertEqual(self.ASSERTION_VERSION, assertion.version)
+
+ def test_valid_saml_xml(self):
+ """Test the generated SAML object can become valid XML.
+
+ Test the generator directly by passing known arguments, the result
+ should be a SAML object that consistently includes attributes based on
+ the known arguments that were passed in.
+
+ """
+ with mock.patch.object(keystone_idp, '_sign_assertion',
+ return_value=self.signed_assertion):
+ generator = keystone_idp.SAMLGenerator()
+ response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
+ self.SUBJECT, self.ROLES,
+ self.PROJECT)
+
+ saml_str = response.to_string()
+ response = etree.fromstring(saml_str)
+ issuer = response[0]
+ assertion = response[2]
+
+ self.assertEqual(self.RECIPIENT, response.get('Destination'))
+ self.assertEqual(self.ISSUER, issuer.text)
+
+ user_attribute = assertion[4][0]
+ self.assertEqual(self.SUBJECT, user_attribute[0].text)
+
+ role_attribute = assertion[4][1]
+ for attribute_value in role_attribute:
+ self.assertIn(attribute_value.text, self.ROLES)
+
+ project_attribute = assertion[4][2]
+ self.assertEqual(self.PROJECT, project_attribute[0].text)
+
+ def test_assertion_using_explicit_namespace_prefixes(self):
+ def mocked_subprocess_check_output(*popenargs, **kwargs):
+ # the last option is the assertion file to be signed
+ filename = popenargs[0][-1]
+ with open(filename, 'r') as f:
+ assertion_content = f.read()
+ # since we are not testing the signature itself, we can return
+ # the assertion as is without signing it
+ return assertion_content
+
+ with mock.patch('subprocess.check_output',
+ side_effect=mocked_subprocess_check_output):
+ generator = keystone_idp.SAMLGenerator()
+ response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
+ self.SUBJECT, self.ROLES,
+ self.PROJECT)
+ assertion_xml = response.assertion.to_string()
+ # make sure we have the proper tag and prefix for the assertion
+ # namespace
+ self.assertIn('<saml:Assertion', assertion_xml)
+ self.assertIn('xmlns:saml="' + saml2.NAMESPACE + '"',
+ assertion_xml)
+ self.assertIn('xmlns:xmldsig="' + xmldsig.NAMESPACE + '"',
+ assertion_xml)
+
+ def test_saml_signing(self):
+ """Test that the SAML generator produces a SAML object.
+
+ Test the SAML generator directly by passing known arguments, the result
+ should be a SAML object that consistently includes attributes based on
+ the known arguments that were passed in.
+
+ """
+ if not _is_xmlsec1_installed():
+ self.skip('xmlsec1 is not installed')
+
+ generator = keystone_idp.SAMLGenerator()
+ response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
+ self.SUBJECT, self.ROLES,
+ self.PROJECT)
+
+ signature = response.assertion.signature
+ self.assertIsNotNone(signature)
+ self.assertIsInstance(signature, xmldsig.Signature)
+
+ idp_public_key = sigver.read_cert_from_file(CONF.saml.certfile, 'pem')
+ cert_text = signature.key_info.x509_data[0].x509_certificate.text
+ # NOTE(stevemar): Rather than one line of text, the certificate is
+ # printed with newlines for readability, we remove these so we can
+ # match it with the key that we used.
+ cert_text = cert_text.replace(os.linesep, '')
+ self.assertEqual(idp_public_key, cert_text)
+
+ def _create_generate_saml_request(self, token_id, sp_id):
+ return {
+ "auth": {
+ "identity": {
+ "methods": [
+ "token"
+ ],
+ "token": {
+ "id": token_id
+ }
+ },
+ "scope": {
+ "service_provider": {
+ "id": sp_id
+ }
+ }
+ }
+ }
+
+ def _fetch_valid_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_id = resp.headers.get('X-Subject-Token')
+ return token_id
+
+ def _fetch_domain_scoped_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ user_domain_id=self.domain['id'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_id = resp.headers.get('X-Subject-Token')
+ return token_id
+
+ def test_not_project_scoped_token(self):
+ """Ensure SAML generation fails when passing domain-scoped tokens.
+
+ The server should return a 403 Forbidden Action.
+
+ """
+ self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
+ token_id = self._fetch_domain_scoped_token()
+ body = self._create_generate_saml_request(token_id,
+                                                  self.SERVICE_PROVIDER_ID)
+ with mock.patch.object(keystone_idp, '_sign_assertion',
+ return_value=self.signed_assertion):
+ self.post(self.SAML_GENERATION_ROUTE, body=body,
+ expected_status=403)
+
+ def test_generate_saml_route(self):
+ """Test that the SAML generation endpoint produces XML.
+
+        The SAML endpoint /v3/auth/OS-FEDERATION/saml2 should take as input
+        a scoped token ID and a Service Provider ID.
+ The controller should fetch details about the user from the token,
+ and details about the service provider from its ID.
+ This should be enough information to invoke the SAML generator and
+ provide a valid SAML (XML) document back.
+
+ """
+ self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
+ token_id = self._fetch_valid_token()
+ body = self._create_generate_saml_request(token_id,
+                                                  self.SERVICE_PROVIDER_ID)
+
+ with mock.patch.object(keystone_idp, '_sign_assertion',
+ return_value=self.signed_assertion):
+ http_response = self.post(self.SAML_GENERATION_ROUTE, body=body,
+ response_content_type='text/xml',
+ expected_status=200)
+
+ response = etree.fromstring(http_response.result)
+ issuer = response[0]
+ assertion = response[2]
+
+ self.assertEqual(self.RECIPIENT, response.get('Destination'))
+ self.assertEqual(self.ISSUER, issuer.text)
+
+ # NOTE(stevemar): We should test this against expected values,
+ # but the self.xyz attribute names are uuids, and we mock out
+ # the result. Ideally we should update the mocked result with
+ # some known data, and create the roles/project/user before
+ # these tests run.
+ user_attribute = assertion[4][0]
+ self.assertIsInstance(user_attribute[0].text, str)
+
+ role_attribute = assertion[4][1]
+ self.assertIsInstance(role_attribute[0].text, str)
+
+ project_attribute = assertion[4][2]
+ self.assertIsInstance(project_attribute[0].text, str)
+
+ def test_invalid_scope_body(self):
+ """Test that missing the scope in request body raises an exception.
+
+ Raises exception.SchemaValidationError() - error code 400
+
+ """
+
+ token_id = uuid.uuid4().hex
+ body = self._create_generate_saml_request(token_id,
+                                                  self.SERVICE_PROVIDER_ID)
+ del body['auth']['scope']
+
+ self.post(self.SAML_GENERATION_ROUTE, body=body, expected_status=400)
+
+ def test_invalid_token_body(self):
+ """Test that missing the token in request body raises an exception.
+
+ Raises exception.SchemaValidationError() - error code 400
+
+ """
+
+ token_id = uuid.uuid4().hex
+ body = self._create_generate_saml_request(token_id,
+                                                  self.SERVICE_PROVIDER_ID)
+ del body['auth']['identity']['token']
+
+ self.post(self.SAML_GENERATION_ROUTE, body=body, expected_status=400)
+
+ def test_sp_not_found(self):
+ """Test SAML generation with an invalid service provider ID.
+
+ Raises exception.ServiceProviderNotFound() - error code 404
+
+ """
+ sp_id = uuid.uuid4().hex
+ token_id = self._fetch_valid_token()
+ body = self._create_generate_saml_request(token_id, sp_id)
+ self.post(self.SAML_GENERATION_ROUTE, body=body, expected_status=404)
+
+ def test_sp_disabled(self):
+ """Try generating assertion for disabled Service Provider."""
+
+ # Disable Service Provider
+ sp_ref = {'enabled': False}
+        self.federation_api.update_sp(self.SERVICE_PROVIDER_ID, sp_ref)
+
+ token_id = self._fetch_valid_token()
+ body = self._create_generate_saml_request(token_id,
+                                                  self.SERVICE_PROVIDER_ID)
+ self.post(self.SAML_GENERATION_ROUTE, body=body, expected_status=403)
+
+ def test_token_not_found(self):
+ """Test that an invalid token in the request body raises an exception.
+
+ Raises exception.TokenNotFound() - error code 404
+
+ """
+
+ token_id = uuid.uuid4().hex
+ body = self._create_generate_saml_request(token_id,
+                                                  self.SERVICE_PROVIDER_ID)
+ self.post(self.SAML_GENERATION_ROUTE, body=body, expected_status=404)
+
+
+class IdPMetadataGenerationTests(FederationTests):
+ """A class for testing Identity Provider Metadata generation."""
+
+ METADATA_URL = '/OS-FEDERATION/saml2/metadata'
+
+ def setUp(self):
+ super(IdPMetadataGenerationTests, self).setUp()
+ self.generator = keystone_idp.MetadataGenerator()
+
+ def config_overrides(self):
+ super(IdPMetadataGenerationTests, self).config_overrides()
+ self.config_fixture.config(
+ group='saml',
+ idp_entity_id=federation_fixtures.IDP_ENTITY_ID,
+ idp_sso_endpoint=federation_fixtures.IDP_SSO_ENDPOINT,
+ idp_organization_name=federation_fixtures.IDP_ORGANIZATION_NAME,
+ idp_organization_display_name=(
+ federation_fixtures.IDP_ORGANIZATION_DISPLAY_NAME),
+ idp_organization_url=federation_fixtures.IDP_ORGANIZATION_URL,
+ idp_contact_company=federation_fixtures.IDP_CONTACT_COMPANY,
+ idp_contact_name=federation_fixtures.IDP_CONTACT_GIVEN_NAME,
+ idp_contact_surname=federation_fixtures.IDP_CONTACT_SURNAME,
+ idp_contact_email=federation_fixtures.IDP_CONTACT_EMAIL,
+ idp_contact_telephone=(
+ federation_fixtures.IDP_CONTACT_TELEPHONE_NUMBER),
+ idp_contact_type=federation_fixtures.IDP_CONTACT_TYPE)
+
+ def test_check_entity_id(self):
+ metadata = self.generator.generate_metadata()
+ self.assertEqual(federation_fixtures.IDP_ENTITY_ID, metadata.entity_id)
+
+ def test_metadata_validity(self):
+ """Call md.EntityDescriptor method that does internal verification."""
+ self.generator.generate_metadata().verify()
+
+ def test_serialize_metadata_object(self):
+ """Check whether serialization doesn't raise any exceptions."""
+ self.generator.generate_metadata().to_string()
+ # TODO(marek-denis): Check values here
+
+ def test_check_idp_sso(self):
+ metadata = self.generator.generate_metadata()
+ idpsso_descriptor = metadata.idpsso_descriptor
+ self.assertIsNotNone(metadata.idpsso_descriptor)
+ self.assertEqual(federation_fixtures.IDP_SSO_ENDPOINT,
+ idpsso_descriptor.single_sign_on_service.location)
+
+ self.assertIsNotNone(idpsso_descriptor.organization)
+ organization = idpsso_descriptor.organization
+ self.assertEqual(federation_fixtures.IDP_ORGANIZATION_DISPLAY_NAME,
+ organization.organization_display_name.text)
+ self.assertEqual(federation_fixtures.IDP_ORGANIZATION_NAME,
+ organization.organization_name.text)
+ self.assertEqual(federation_fixtures.IDP_ORGANIZATION_URL,
+ organization.organization_url.text)
+
+ self.assertIsNotNone(idpsso_descriptor.contact_person)
+ contact_person = idpsso_descriptor.contact_person
+
+ self.assertEqual(federation_fixtures.IDP_CONTACT_GIVEN_NAME,
+ contact_person.given_name.text)
+ self.assertEqual(federation_fixtures.IDP_CONTACT_SURNAME,
+ contact_person.sur_name.text)
+ self.assertEqual(federation_fixtures.IDP_CONTACT_EMAIL,
+ contact_person.email_address.text)
+ self.assertEqual(federation_fixtures.IDP_CONTACT_TELEPHONE_NUMBER,
+ contact_person.telephone_number.text)
+ self.assertEqual(federation_fixtures.IDP_CONTACT_TYPE,
+ contact_person.contact_type)
+
+ def test_metadata_no_organization(self):
+ self.config_fixture.config(
+ group='saml',
+ idp_organization_display_name=None,
+ idp_organization_url=None,
+ idp_organization_name=None)
+ metadata = self.generator.generate_metadata()
+ idpsso_descriptor = metadata.idpsso_descriptor
+ self.assertIsNotNone(metadata.idpsso_descriptor)
+ self.assertIsNone(idpsso_descriptor.organization)
+ self.assertIsNotNone(idpsso_descriptor.contact_person)
+
+ def test_metadata_no_contact_person(self):
+ self.config_fixture.config(
+ group='saml',
+ idp_contact_name=None,
+ idp_contact_surname=None,
+ idp_contact_email=None,
+ idp_contact_telephone=None)
+ metadata = self.generator.generate_metadata()
+ idpsso_descriptor = metadata.idpsso_descriptor
+ self.assertIsNotNone(metadata.idpsso_descriptor)
+ self.assertIsNotNone(idpsso_descriptor.organization)
+ self.assertEqual([], idpsso_descriptor.contact_person)
+
+ def test_metadata_invalid_contact_type(self):
+ self.config_fixture.config(
+ group='saml',
+ idp_contact_type="invalid")
+ self.assertRaises(exception.ValidationError,
+ self.generator.generate_metadata)
+
+ def test_metadata_invalid_idp_sso_endpoint(self):
+ self.config_fixture.config(
+ group='saml',
+ idp_sso_endpoint=None)
+ self.assertRaises(exception.ValidationError,
+ self.generator.generate_metadata)
+
+ def test_metadata_invalid_idp_entity_id(self):
+ self.config_fixture.config(
+ group='saml',
+ idp_entity_id=None)
+ self.assertRaises(exception.ValidationError,
+ self.generator.generate_metadata)
+
+ def test_get_metadata_with_no_metadata_file_configured(self):
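+        # Without idp_metadata_path configured, the server cannot load the
+        # metadata file and responds with HTTP 500.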
+ self.get(self.METADATA_URL, expected_status=500)
+
+ def test_get_metadata(self):
+ self.config_fixture.config(
+ group='saml', idp_metadata_path=XMLDIR + '/idp_saml2_metadata.xml')
+ r = self.get(self.METADATA_URL, response_content_type='text/xml',
+ expected_status=200)
+ self.assertEqual('text/xml', r.headers.get('Content-Type'))
+
+ reference_file = _load_xml('idp_saml2_metadata.xml')
+ self.assertEqual(reference_file, r.result)
+
+
+class ServiceProviderTests(FederationTests):
+ """A test class for Service Providers."""
+
+ MEMBER_NAME = 'service_provider'
+ COLLECTION_NAME = 'service_providers'
+ SERVICE_PROVIDER_ID = 'ACME'
+ SP_KEYS = ['auth_url', 'id', 'enabled', 'description', 'sp_url']
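+    # SP_KEYS lists the attributes that assertValidEntity() checks on
+    # every service provider response below.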
+
+ def setUp(self):
+ super(FederationTests, self).setUp()
+ # Add a Service Provider
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ self.SP_REF = self.sp_ref()
+ self.SERVICE_PROVIDER = self.put(
+ url, body={'service_provider': self.SP_REF},
+ expected_status=201).result
+
+ def sp_ref(self):
+ ref = {
+ 'auth_url': 'https://' + uuid.uuid4().hex + '.com',
+ 'enabled': True,
+ 'description': uuid.uuid4().hex,
+ 'sp_url': 'https://' + uuid.uuid4().hex + '.com',
+ }
+ return ref
+
+ def base_url(self, suffix=None):
+ if suffix is not None:
+ return '/OS-FEDERATION/service_providers/' + str(suffix)
+ return '/OS-FEDERATION/service_providers'
+
+ def test_get_service_provider(self):
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ resp = self.get(url, expected_status=200)
+ self.assertValidEntity(resp.result['service_provider'],
+ keys_to_check=self.SP_KEYS)
+
+ def test_get_service_provider_fail(self):
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ self.get(url, expected_status=404)
+
+ def test_create_service_provider(self):
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ sp = self.sp_ref()
+ resp = self.put(url, body={'service_provider': sp},
+ expected_status=201)
+ self.assertValidEntity(resp.result['service_provider'],
+ keys_to_check=self.SP_KEYS)
+
+ def test_create_service_provider_fail(self):
+ """Try adding SP object with unallowed attribute."""
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ sp = self.sp_ref()
+ sp[uuid.uuid4().hex] = uuid.uuid4().hex
+ self.put(url, body={'service_provider': sp},
+ expected_status=400)
+
+ def test_list_service_providers(self):
+ """Test listing of service provider objects.
+
+        Add two new service providers. List all available service providers.
+        Expect to get a list of three service providers (one created by
+        setUp()). Test if attributes match.
+
+ """
+ ref_service_providers = {
+ uuid.uuid4().hex: self.sp_ref(),
+ uuid.uuid4().hex: self.sp_ref(),
+ }
+ for id, sp in ref_service_providers.items():
+ url = self.base_url(suffix=id)
+ self.put(url, body={'service_provider': sp}, expected_status=201)
+
+        # Insert ids into the service provider objects; we will compare them
+        # with responses from the server, which include the 'id' attribute.
+
+ ref_service_providers[self.SERVICE_PROVIDER_ID] = self.SP_REF
+ for id, sp in ref_service_providers.items():
+ sp['id'] = id
+
+ url = self.base_url()
+ resp = self.get(url)
+ service_providers = resp.result
+ for service_provider in service_providers['service_providers']:
+ id = service_provider['id']
+ self.assertValidEntity(
+ service_provider, ref=ref_service_providers[id],
+ keys_to_check=self.SP_KEYS)
+
+ def test_update_service_provider(self):
+ """Update existing service provider.
+
+ Update default existing service provider and make sure it has been
+ properly changed.
+
+ """
+ new_sp_ref = self.sp_ref()
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ resp = self.patch(url, body={'service_provider': new_sp_ref},
+ expected_status=200)
+ patch_result = resp.result
+ new_sp_ref['id'] = self.SERVICE_PROVIDER_ID
+ self.assertValidEntity(patch_result['service_provider'],
+ ref=new_sp_ref,
+ keys_to_check=self.SP_KEYS)
+
+ resp = self.get(url, expected_status=200)
+ get_result = resp.result
+
+ self.assertDictEqual(patch_result['service_provider'],
+ get_result['service_provider'])
+
+ def test_update_service_provider_immutable_parameters(self):
+ """Update immutable attributes in service provider.
+
+ In this particular case the test will try to change ``id`` attribute.
+ The server should return an HTTP 403 error code.
+
+ """
+ new_sp_ref = {'id': uuid.uuid4().hex}
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ self.patch(url, body={'service_provider': new_sp_ref},
+ expected_status=400)
+
+ def test_update_service_provider_unknown_parameter(self):
+ new_sp_ref = self.sp_ref()
+ new_sp_ref[uuid.uuid4().hex] = uuid.uuid4().hex
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ self.patch(url, body={'service_provider': new_sp_ref},
+ expected_status=400)
+
+ def test_update_service_provider_404(self):
+ new_sp_ref = self.sp_ref()
+ new_sp_ref['description'] = uuid.uuid4().hex
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ self.patch(url, body={'service_provider': new_sp_ref},
+ expected_status=404)
+
+ def test_delete_service_provider(self):
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ self.delete(url, expected_status=204)
+
+ def test_delete_service_provider_404(self):
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ self.delete(url, expected_status=404)
+
+
+class WebSSOTests(FederatedTokenTests):
+ """A class for testing Web SSO."""
+
+ SSO_URL = '/auth/OS-FEDERATION/websso/'
+ SSO_TEMPLATE_NAME = 'sso_callback_template.html'
+ SSO_TEMPLATE_PATH = os.path.join(core.dirs.etc(), SSO_TEMPLATE_NAME)
+ TRUSTED_DASHBOARD = 'http://horizon.com'
+ ORIGIN = urllib.parse.quote_plus(TRUSTED_DASHBOARD)
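+    # ORIGIN is the url-quoted form of the trusted dashboard, as it
+    # appears in the websso 'origin' query parameter.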
+
+ def setUp(self):
+ super(WebSSOTests, self).setUp()
+ self.api = federation_controllers.Auth()
+
+ def config_overrides(self):
+ super(WebSSOTests, self).config_overrides()
+ self.config_fixture.config(
+ group='federation',
+ trusted_dashboard=[self.TRUSTED_DASHBOARD],
+ sso_callback_template=self.SSO_TEMPLATE_PATH,
+ remote_id_attribute=self.REMOTE_ID_ATTR)
+
+ def test_render_callback_template(self):
+ token_id = uuid.uuid4().hex
+ resp = self.api.render_html_response(self.TRUSTED_DASHBOARD, token_id)
+ self.assertIn(token_id, resp.body)
+ self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
+
+ def test_federated_sso_auth(self):
+ environment = {self.REMOTE_ID_ATTR: self.REMOTE_ID}
+ context = {'environment': environment}
+ query_string = {'origin': self.ORIGIN}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
+ resp = self.api.federated_sso_auth(context, self.PROTOCOL)
+ self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
+
+ def test_federated_sso_auth_bad_remote_id(self):
+ environment = {self.REMOTE_ID_ATTR: self.IDP}
+ context = {'environment': environment}
+ query_string = {'origin': self.ORIGIN}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
+ self.assertRaises(exception.IdentityProviderNotFound,
+ self.api.federated_sso_auth,
+ context, self.PROTOCOL)
+
+ def test_federated_sso_missing_query(self):
+ environment = {self.REMOTE_ID_ATTR: self.REMOTE_ID}
+ context = {'environment': environment}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
+ self.assertRaises(exception.ValidationError,
+ self.api.federated_sso_auth,
+ context, self.PROTOCOL)
+
+ def test_federated_sso_missing_query_bad_remote_id(self):
+ environment = {self.REMOTE_ID_ATTR: self.IDP}
+ context = {'environment': environment}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
+ self.assertRaises(exception.ValidationError,
+ self.api.federated_sso_auth,
+ context, self.PROTOCOL)
+
+ def test_federated_sso_untrusted_dashboard(self):
+ environment = {self.REMOTE_ID_ATTR: self.REMOTE_ID}
+ context = {'environment': environment}
+ query_string = {'origin': uuid.uuid4().hex}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
+ self.assertRaises(exception.Unauthorized,
+ self.api.federated_sso_auth,
+ context, self.PROTOCOL)
+
+ def test_federated_sso_untrusted_dashboard_bad_remote_id(self):
+ environment = {self.REMOTE_ID_ATTR: self.IDP}
+ context = {'environment': environment}
+ query_string = {'origin': uuid.uuid4().hex}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
+ self.assertRaises(exception.Unauthorized,
+ self.api.federated_sso_auth,
+ context, self.PROTOCOL)
+
+ def test_federated_sso_missing_remote_id(self):
+ context = {'environment': {}}
+ query_string = {'origin': self.ORIGIN}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
+ self.assertRaises(exception.Unauthorized,
+ self.api.federated_sso_auth,
+ context, self.PROTOCOL)
+
+
+class K2KServiceCatalogTests(FederationTests):
+ SP1 = 'SP1'
+ SP2 = 'SP2'
+ SP3 = 'SP3'
+
+ def setUp(self):
+ super(K2KServiceCatalogTests, self).setUp()
+
+ sp = self.sp_ref()
+ self.federation_api.create_sp(self.SP1, sp)
+ self.sp_alpha = {self.SP1: sp}
+
+ sp = self.sp_ref()
+ self.federation_api.create_sp(self.SP2, sp)
+ self.sp_beta = {self.SP2: sp}
+
+ sp = self.sp_ref()
+ self.federation_api.create_sp(self.SP3, sp)
+ self.sp_gamma = {self.SP3: sp}
+
+ self.token_v3_helper = token_common.V3TokenDataHelper()
+
+ def sp_response(self, id, ref):
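+        # The token's service catalog exposes only 'auth_url', 'sp_url' and
+        # 'id', so strip 'enabled' and 'description' from the reference
+        # before comparison; note this mutates ``ref`` in place.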
+ ref.pop('enabled')
+ ref.pop('description')
+ ref['id'] = id
+ return ref
+
+ def sp_ref(self):
+ ref = {
+ 'auth_url': uuid.uuid4().hex,
+ 'enabled': True,
+ 'description': uuid.uuid4().hex,
+ 'sp_url': uuid.uuid4().hex,
+ }
+ return ref
+
+ def _validate_service_providers(self, token, ref):
+ token_data = token['token']
+ self.assertIn('service_providers', token_data)
+ self.assertIsNotNone(token_data['service_providers'])
+ service_providers = token_data.get('service_providers')
+
+ self.assertEqual(len(ref), len(service_providers))
+ for entity in service_providers:
+ id = entity.get('id')
+ ref_entity = self.sp_response(id, ref.get(id))
+ self.assertDictEqual(ref_entity, entity)
+
+ def test_service_providers_in_token(self):
+ """Check if service providers are listed in service catalog."""
+
+ token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
+ ref = {}
+ for r in (self.sp_alpha, self.sp_beta, self.sp_gamma):
+ ref.update(r)
+ self._validate_service_providers(token, ref)
+
+    def test_service_providers_in_token_disabled_sp(self):
+ """Test behaviour with disabled service providers.
+
+ Disabled service providers should not be listed in the service
+ catalog.
+
+ """
+ # disable service provider ALPHA
+ sp_ref = {'enabled': False}
+ self.federation_api.update_sp(self.SP1, sp_ref)
+
+ token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
+ ref = {}
+ for r in (self.sp_beta, self.sp_gamma):
+ ref.update(r)
+ self._validate_service_providers(token, ref)
+
+ def test_no_service_providers_in_token(self):
+ """Test service catalog with disabled service providers.
+
+        There should be no ``service_providers`` entry in the catalog.
+        The test passes as long as no such entry appears in the token.
+
+ """
+ sp_ref = {'enabled': False}
+ for sp in (self.SP1, self.SP2, self.SP3):
+ self.federation_api.update_sp(sp, sp_ref)
+
+ token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
+ self.assertNotIn('service_providers', token['token'],
+ message=('Expected Service Catalog not to have '
+ 'service_providers'))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_filters.py b/keystone-moon/keystone/tests/unit/test_v3_filters.py
new file mode 100644
index 00000000..4ad44657
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_filters.py
@@ -0,0 +1,452 @@
+# Copyright 2012 OpenStack LLC
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+
+from keystone.tests.unit import filtering
+from keystone.tests.unit.ksfixtures import temporaryfile
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+class IdentityTestFilteredCase(filtering.FilterTests,
+ test_v3.RestfulTestCase):
+ """Test filter enforcement on the v3 Identity API."""
+
+ def setUp(self):
+ """Setup for Identity Filter Test Cases."""
+
+ super(IdentityTestFilteredCase, self).setUp()
+ self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
+ self.tmpfilename = self.tempfile.file_name
+ self.config_fixture.config(group='oslo_policy',
+ policy_file=self.tmpfilename)
+
+ def load_sample_data(self):
+ """Create sample data for these tests.
+
+ As well as the usual housekeeping, create a set of domains,
+ users, roles and projects for the subsequent tests:
+
+        - Three domains: A, B & C. C is disabled.
+ - DomainA has user1, DomainB has user2 and user3
+ - DomainA has group1 and group2, DomainB has group3
+ - User1 has a role on DomainA
+
+ Remember that there will also be a fourth domain in existence,
+ the default domain.
+
+ """
+ # Start by creating a few domains
+ self._populate_default_domain()
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'], self.domainB)
+ self.domainC = self.new_domain_ref()
+ self.domainC['enabled'] = False
+ self.resource_api.create_domain(self.domainC['id'], self.domainC)
+
+ # Now create some users, one in domainA and two of them in domainB
+ self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
+ password = uuid.uuid4().hex
+ self.user1['password'] = password
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+
+ self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
+ self.user2['password'] = password
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+
+ self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
+ self.user3['password'] = password
+ self.user3 = self.identity_api.create_user(self.user3)
+ self.user3['password'] = password
+
+ self.role = self.new_role_ref()
+ self.role_api.create_role(self.role['id'], self.role)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user1['id'],
+ domain_id=self.domainA['id'])
+
+ # A default auth request we can use - un-scoped user token
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'])
+
+    def _get_id_list_from_ref_list(self, ref_list):
+        return [x['id'] for x in ref_list]
+
+ def _set_policy(self, new_policy):
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write(jsonutils.dumps(new_policy))
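+
+        # NOTE: with oslo.policy, an empty rule list means the action is
+        # allowed for any caller, so a policy file such as
+        #
+        #     {"identity:list_users": []}
+        #
+        # effectively removes protection from that API for these tests.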
+
+ def test_list_users_filtered_by_domain(self):
+ """GET /users?domain_id=mydomain (filtered)
+
+ Test Plan:
+
+        - Update policy so the API is unprotected
+ - Use an un-scoped token to make sure we can filter the
+ users by domainB, getting back the 2 users in that domain
+
+ """
+ self._set_policy({"identity:list_users": []})
+ url_by_name = '/users?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should get back two users, those in DomainB
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertIn(self.user2['id'], id_list)
+ self.assertIn(self.user3['id'], id_list)
+
+ def test_list_filtered_domains(self):
+ """GET /domains?enabled=0
+
+ Test Plan:
+
+        - Update policy for no protection on the API
+ - Filter by the 'enabled' boolean to get disabled domains, which
+ should return just domainC
+ - Try the filter using different ways of specifying True/False
+ to test that our handling of booleans in filter matching is
+ correct
+
+ """
+ new_policy = {"identity:list_domains": []}
+ self._set_policy(new_policy)
+ r = self.get('/domains?enabled=0', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(1, len(id_list))
+ self.assertIn(self.domainC['id'], id_list)
+
+ # Try a few ways of specifying 'false'
+ for val in ('0', 'false', 'False', 'FALSE', 'n', 'no', 'off'):
+ r = self.get('/domains?enabled=%s' % val, auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual([self.domainC['id']], id_list)
+
+ # Now try a few ways of specifying 'true' when we should get back
+ # the other two domains, plus the default domain
+ for val in ('1', 'true', 'True', 'TRUE', 'y', 'yes', 'on'):
+ r = self.get('/domains?enabled=%s' % val, auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(3, len(id_list))
+ self.assertIn(self.domainA['id'], id_list)
+ self.assertIn(self.domainB['id'], id_list)
+ self.assertIn(CONF.identity.default_domain_id, id_list)
+
+ r = self.get('/domains?enabled', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(3, len(id_list))
+ self.assertIn(self.domainA['id'], id_list)
+ self.assertIn(self.domainB['id'], id_list)
+ self.assertIn(CONF.identity.default_domain_id, id_list)
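+
+        # NOTE: as the assertions above show, a bare '?enabled' flag with
+        # no value is treated the same as enabled=true by the filter code.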
+
+ def test_multiple_filters(self):
+ """GET /domains?enabled&name=myname
+
+ Test Plan:
+
+        - Update policy for no protection on the API
+ - Filter by the 'enabled' boolean and name - this should
+ return a single domain
+
+ """
+ new_policy = {"identity:list_domains": []}
+ self._set_policy(new_policy)
+
+ my_url = '/domains?enabled&name=%s' % self.domainA['name']
+ r = self.get(my_url, auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(1, len(id_list))
+ self.assertIn(self.domainA['id'], id_list)
+ self.assertIs(True, r.result.get('domains')[0]['enabled'])
+
+ def test_invalid_filter_is_ignored(self):
+ """GET /domains?enableds&name=myname
+
+ Test Plan:
+
+        - Update policy for no protection on the API
+ - Filter by name and 'enableds', which does not exist
+ - Assert 'enableds' is ignored
+
+ """
+ new_policy = {"identity:list_domains": []}
+ self._set_policy(new_policy)
+
+ my_url = '/domains?enableds=0&name=%s' % self.domainA['name']
+ r = self.get(my_url, auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+
+ # domainA is returned and it is enabled, since enableds=0 is not the
+ # same as enabled=0
+ self.assertEqual(1, len(id_list))
+ self.assertIn(self.domainA['id'], id_list)
+ self.assertIs(True, r.result.get('domains')[0]['enabled'])
+
+ def test_list_users_filtered_by_funny_name(self):
+ """GET /users?name=%myname%
+
+ Test Plan:
+
+        - Update policy so the API is unprotected
+ - Update a user with name that has filter escape characters
+ - Ensure we can filter on it
+
+ """
+ self._set_policy({"identity:list_users": []})
+ user = self.user1
+ user['name'] = '%my%name%'
+ self.identity_api.update_user(user['id'], user)
+
+ url_by_name = '/users?name=%my%name%'
+ r = self.get(url_by_name, auth=self.auth)
+
+ self.assertEqual(1, len(r.result.get('users')))
+ self.assertEqual(user['id'], r.result.get('users')[0]['id'])
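+
+        # NOTE: an aside, not part of the original test: a real HTTP client
+        # would normally percent-encode the literal '%' characters in such
+        # a name, e.g.:
+        #
+        #     from six.moves import urllib
+        #     urllib.parse.quote('%my%name%')  # -> '%25my%25name%25'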
+
+ def test_inexact_filters(self):
+ # Create 20 users
+ user_list = self._create_test_data('user', 20)
+ # Set up some names that we can filter on
+ user = user_list[5]
+ user['name'] = 'The'
+ self.identity_api.update_user(user['id'], user)
+ user = user_list[6]
+ user['name'] = 'The Ministry'
+ self.identity_api.update_user(user['id'], user)
+ user = user_list[7]
+ user['name'] = 'The Ministry of'
+ self.identity_api.update_user(user['id'], user)
+ user = user_list[8]
+ user['name'] = 'The Ministry of Silly'
+ self.identity_api.update_user(user['id'], user)
+ user = user_list[9]
+ user['name'] = 'The Ministry of Silly Walks'
+ self.identity_api.update_user(user['id'], user)
+ # ...and one for useful case insensitivity testing
+ user = user_list[10]
+ user['name'] = 'the ministry of silly walks OF'
+ self.identity_api.update_user(user['id'], user)
+
+ self._set_policy({"identity:list_users": []})
+
+ url_by_name = '/users?name__contains=Ministry'
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(4, len(r.result.get('users')))
+ self._match_with_list(r.result.get('users'), user_list,
+ list_start=6, list_end=10)
+
+ url_by_name = '/users?name__icontains=miNIstry'
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(5, len(r.result.get('users')))
+ self._match_with_list(r.result.get('users'), user_list,
+ list_start=6, list_end=11)
+
+ url_by_name = '/users?name__startswith=The'
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(5, len(r.result.get('users')))
+ self._match_with_list(r.result.get('users'), user_list,
+ list_start=5, list_end=10)
+
+ url_by_name = '/users?name__istartswith=the'
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(6, len(r.result.get('users')))
+ self._match_with_list(r.result.get('users'), user_list,
+ list_start=5, list_end=11)
+
+ url_by_name = '/users?name__endswith=of'
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(1, len(r.result.get('users')))
+ self.assertEqual(r.result.get('users')[0]['id'], user_list[7]['id'])
+
+ url_by_name = '/users?name__iendswith=OF'
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(2, len(r.result.get('users')))
+ self.assertEqual(user_list[7]['id'], r.result.get('users')[0]['id'])
+ self.assertEqual(user_list[10]['id'], r.result.get('users')[1]['id'])
+
+ self._delete_test_data('user', user_list)
+
+ def test_filter_sql_injection_attack(self):
+ """GET /users?name=<injected sql_statement>
+
+ Test Plan:
+
+ - Attempt to get all entities back by passing a two-term attribute
+ - Attempt to piggyback filter to damage DB (e.g. drop table)
+
+ """
+ self._set_policy({"identity:list_users": [],
+ "identity:list_groups": [],
+ "identity:create_group": []})
+
+ url_by_name = "/users?name=anything' or 'x'='x"
+ r = self.get(url_by_name, auth=self.auth)
+
+ self.assertEqual(0, len(r.result.get('users')))
+
+ # See if we can add a SQL command...use the group table instead of the
+        # user table since 'user' is a reserved word for SQLAlchemy.
+ group = self.new_group_ref(domain_id=self.domainB['id'])
+ group = self.identity_api.create_group(group)
+
+ url_by_name = "/users?name=x'; drop table group"
+ r = self.get(url_by_name, auth=self.auth)
+
+ # Check group table is still there...
+ url_by_name = "/groups"
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertTrue(len(r.result.get('groups')) > 0)
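+
+        # NOTE: an explanatory sketch, assuming the SQL backend: the
+        # injection attempts above are harmless because the filter value is
+        # handed to SQLAlchemy as a bound parameter rather than being
+        # interpolated into the statement, roughly:
+        #
+        #     query.filter_by(name="x'; drop table group")
+        #     # -> ... WHERE name = :name_1  (value bound, never executed)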
+
+
+class IdentityTestListLimitCase(IdentityTestFilteredCase):
+ """Test list limiting enforcement on the v3 Identity API."""
+ content_type = 'json'
+
+ def setUp(self):
+ """Setup for Identity Limit Test Cases."""
+
+ super(IdentityTestListLimitCase, self).setUp()
+
+ self._set_policy({"identity:list_users": [],
+ "identity:list_groups": [],
+ "identity:list_projects": [],
+ "identity:list_services": [],
+ "identity:list_policies": []})
+
+ # Create 10 entries for each of the entities we are going to test
+ self.ENTITY_TYPES = ['user', 'group', 'project']
+ self.entity_lists = {}
+ for entity in self.ENTITY_TYPES:
+ self.entity_lists[entity] = self._create_test_data(entity, 10)
+ # Make sure we clean up when finished
+ self.addCleanup(self.clean_up_entity, entity)
+
+ self.service_list = []
+ self.addCleanup(self.clean_up_service)
+ for _ in range(10):
+ new_entity = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex}
+ service = self.catalog_api.create_service(new_entity['id'],
+ new_entity)
+ self.service_list.append(service)
+
+ self.policy_list = []
+ self.addCleanup(self.clean_up_policy)
+ for _ in range(10):
+ new_entity = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex,
+ 'blob': uuid.uuid4().hex}
+ policy = self.policy_api.create_policy(new_entity['id'],
+ new_entity)
+ self.policy_list.append(policy)
+
+ def clean_up_entity(self, entity):
+ """Clean up entity test data from Identity Limit Test Cases."""
+
+ self._delete_test_data(entity, self.entity_lists[entity])
+
+ def clean_up_service(self):
+ """Clean up service test data from Identity Limit Test Cases."""
+
+ for service in self.service_list:
+ self.catalog_api.delete_service(service['id'])
+
+ def clean_up_policy(self):
+ """Clean up policy test data from Identity Limit Test Cases."""
+
+ for policy in self.policy_list:
+ self.policy_api.delete_policy(policy['id'])
+
+ def _test_entity_list_limit(self, entity, driver):
+ """GET /<entities> (limited)
+
+ Test Plan:
+
+ - For the specified type of entity:
+            - Update policy for no protection on the API
+            - Add a bunch of entities
+            - Set the global list limit to 5, and check that getting all
+              entities only returns 5
+            - Set the driver list_limit to 4, and check that now only 4 are
+              returned
+
+ """
+ if entity == 'policy':
+ plural = 'policies'
+ else:
+ plural = '%ss' % entity
+
+ self.config_fixture.config(list_limit=5)
+ self.config_fixture.config(group=driver, list_limit=None)
+ r = self.get('/%s' % plural, auth=self.auth)
+ self.assertEqual(5, len(r.result.get(plural)))
+ self.assertIs(r.result.get('truncated'), True)
+
+ self.config_fixture.config(group=driver, list_limit=4)
+ r = self.get('/%s' % plural, auth=self.auth)
+ self.assertEqual(4, len(r.result.get(plural)))
+ self.assertIs(r.result.get('truncated'), True)
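+
+    # NOTE: per the helper above, a driver-specific list_limit overrides
+    # the global one, and a truncated list is flagged by 'truncated': True
+    # in the collection response.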
+
+ def test_users_list_limit(self):
+ self._test_entity_list_limit('user', 'identity')
+
+ def test_groups_list_limit(self):
+ self._test_entity_list_limit('group', 'identity')
+
+ def test_projects_list_limit(self):
+ self._test_entity_list_limit('project', 'resource')
+
+ def test_services_list_limit(self):
+ self._test_entity_list_limit('service', 'catalog')
+
+ def test_non_driver_list_limit(self):
+ """Check list can be limited without driver level support.
+
+        Policy limiting is not done at the driver level (since it
+        really isn't worth doing it there), so this serves as a test
+        that the controller level will successfully limit in this
+        case.
+
+ """
+ self._test_entity_list_limit('policy', 'policy')
+
+ def test_no_limit(self):
+ """Check truncated attribute not set when list not limited."""
+
+ r = self.get('/services', auth=self.auth)
+ self.assertEqual(10, len(r.result.get('services')))
+ self.assertIsNone(r.result.get('truncated'))
+
+ def test_at_limit(self):
+ """Check truncated attribute not set when list at max size."""
+
+ # Test this by overriding the general limit with a higher
+ # driver-specific limit (allowing all entities to be returned
+ # in the collection), which should result in a non truncated list
+ self.config_fixture.config(list_limit=5)
+ self.config_fixture.config(group='catalog', list_limit=10)
+ r = self.get('/services', auth=self.auth)
+ self.assertEqual(10, len(r.result.get('services')))
+ self.assertIsNone(r.result.get('truncated'))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_identity.py b/keystone-moon/keystone/tests/unit/test_v3_identity.py
new file mode 100644
index 00000000..ac077297
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_identity.py
@@ -0,0 +1,584 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+from testtools import matchers
+
+from keystone.common import controller
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+class IdentityTestCase(test_v3.RestfulTestCase):
+ """Test users and groups."""
+
+ def setUp(self):
+ super(IdentityTestCase, self).setUp()
+
+ self.group = self.new_group_ref(
+ domain_id=self.domain_id)
+ self.group = self.identity_api.create_group(self.group)
+ self.group_id = self.group['id']
+
+ self.credential_id = uuid.uuid4().hex
+ self.credential = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential['id'] = self.credential_id
+ self.credential_api.create_credential(
+ self.credential_id,
+ self.credential)
+
+ # user crud tests
+
+ def test_create_user(self):
+ """Call ``POST /users``."""
+ ref = self.new_user_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/users',
+ body={'user': ref})
+ return self.assertValidUserResponse(r, ref)
+
+ def test_create_user_without_domain(self):
+ """Call ``POST /users`` without specifying domain.
+
+ According to the identity-api specification, if you do not
+        explicitly specify the domain_id in the entity, it should
+ take the domain scope of the token as the domain_id.
+
+ """
+ # Create a user with a role on the domain so we can get a
+ # domain scoped token
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user = self.new_user_ref(domain_id=domain['id'])
+ password = user['password']
+ user = self.identity_api.create_user(user)
+ user['password'] = password
+ self.assignment_api.create_grant(
+ role_id=self.role_id, user_id=user['id'],
+ domain_id=domain['id'])
+
+ ref = self.new_user_ref(domain_id=domain['id'])
+ ref_nd = ref.copy()
+ ref_nd.pop('domain_id')
+ auth = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ domain_id=domain['id'])
+ r = self.post('/users', body={'user': ref_nd}, auth=auth)
+ self.assertValidUserResponse(r, ref)
+
+ # Now try the same thing without a domain token - which should fail
+ ref = self.new_user_ref(domain_id=domain['id'])
+ ref_nd = ref.copy()
+ ref_nd.pop('domain_id')
+ auth = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/users', body={'user': ref_nd}, auth=auth)
+ # TODO(henry-nash): Due to bug #1283539 we currently automatically
+ # use the default domain_id if a domain scoped token is not being
+ # used. Change the code below to expect a failure once this bug is
+ # fixed.
+ ref['domain_id'] = CONF.identity.default_domain_id
+ return self.assertValidUserResponse(r, ref)
+
+ def test_create_user_400(self):
+ """Call ``POST /users``."""
+ self.post('/users', body={'user': {}}, expected_status=400)
+
+ def test_list_users(self):
+ """Call ``GET /users``."""
+ resource_url = '/users'
+ r = self.get(resource_url)
+ self.assertValidUserListResponse(r, ref=self.user,
+ resource_url=resource_url)
+
+ def test_list_users_with_multiple_backends(self):
+ """Call ``GET /users`` when multiple backends is enabled.
+
+ In this scenario, the controller requires a domain to be specified
+ either as a filter or by using a domain scoped token.
+
+ """
+ self.config_fixture.config(group='identity',
+ domain_specific_drivers_enabled=True)
+
+ # Create a user with a role on the domain so we can get a
+ # domain scoped token
+ domain = self.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user = self.new_user_ref(domain_id=domain['id'])
+ password = user['password']
+ user = self.identity_api.create_user(user)
+ user['password'] = password
+ self.assignment_api.create_grant(
+ role_id=self.role_id, user_id=user['id'],
+ domain_id=domain['id'])
+
+ ref = self.new_user_ref(domain_id=domain['id'])
+ ref_nd = ref.copy()
+ ref_nd.pop('domain_id')
+ auth = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ domain_id=domain['id'])
+
+ # First try using a domain scoped token
+ resource_url = '/users'
+ r = self.get(resource_url, auth=auth)
+ self.assertValidUserListResponse(r, ref=user,
+ resource_url=resource_url)
+
+ # Now try with an explicit filter
+ resource_url = ('/users?domain_id=%(domain_id)s' %
+ {'domain_id': domain['id']})
+ r = self.get(resource_url)
+ self.assertValidUserListResponse(r, ref=user,
+ resource_url=resource_url)
+
+ # Now try the same thing without a domain token or filter,
+ # which should fail
+ r = self.get('/users', expected_status=exception.Unauthorized.code)
+
+ def test_list_users_with_static_admin_token_and_multiple_backends(self):
+ # domain-specific operations with the bootstrap ADMIN token is
+ # disallowed when domain-specific drivers are enabled
+ self.config_fixture.config(group='identity',
+ domain_specific_drivers_enabled=True)
+ self.get('/users', token=CONF.admin_token,
+ expected_status=exception.Unauthorized.code)
+
+ def test_list_users_no_default_project(self):
+ """Call ``GET /users`` making sure no default_project_id."""
+ user = self.new_user_ref(self.domain_id)
+ user = self.identity_api.create_user(user)
+ resource_url = '/users'
+ r = self.get(resource_url)
+ self.assertValidUserListResponse(r, ref=user,
+ resource_url=resource_url)
+
+ def test_get_user(self):
+ """Call ``GET /users/{user_id}``."""
+ r = self.get('/users/%(user_id)s' % {
+ 'user_id': self.user['id']})
+ self.assertValidUserResponse(r, self.user)
+
+ def test_get_user_with_default_project(self):
+ """Call ``GET /users/{user_id}`` making sure of default_project_id."""
+ user = self.new_user_ref(domain_id=self.domain_id,
+ project_id=self.project_id)
+ user = self.identity_api.create_user(user)
+ r = self.get('/users/%(user_id)s' % {'user_id': user['id']})
+ self.assertValidUserResponse(r, user)
+
+ def test_add_user_to_group(self):
+ """Call ``PUT /groups/{group_id}/users/{user_id}``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+
+ def test_list_groups_for_user(self):
+ """Call ``GET /users/{user_id}/groups``."""
+
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user1['password']
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ password = self.user2['password']
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user1['id']})
+
+ # Scenarios below are written to test the default policy configuration
+
+ # One should be allowed to list one's own groups
+ auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'])
+ resource_url = ('/users/%(user_id)s/groups' %
+ {'user_id': self.user1['id']})
+ r = self.get(resource_url, auth=auth)
+ self.assertValidGroupListResponse(r, ref=self.group,
+ resource_url=resource_url)
+
+ # Administrator is allowed to list others' groups
+ resource_url = ('/users/%(user_id)s/groups' %
+ {'user_id': self.user1['id']})
+ r = self.get(resource_url)
+ self.assertValidGroupListResponse(r, ref=self.group,
+ resource_url=resource_url)
+
+        # Ordinary users should not be allowed to list others' groups
+ auth = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'])
+ r = self.get('/users/%(user_id)s/groups' % {
+ 'user_id': self.user1['id']}, auth=auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_check_user_in_group(self):
+ """Call ``HEAD /groups/{group_id}/users/{user_id}``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+ self.head('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+
+ def test_list_users_in_group(self):
+ """Call ``GET /groups/{group_id}/users``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+ resource_url = ('/groups/%(group_id)s/users' %
+ {'group_id': self.group_id})
+ r = self.get(resource_url)
+ self.assertValidUserListResponse(r, ref=self.user,
+ resource_url=resource_url)
+ self.assertIn('/groups/%(group_id)s/users' % {
+ 'group_id': self.group_id}, r.result['links']['self'])
+
+ def test_remove_user_from_group(self):
+ """Call ``DELETE /groups/{group_id}/users/{user_id}``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+ self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+
+ def test_update_user(self):
+ """Call ``PATCH /users/{user_id}``."""
+ user = self.new_user_ref(domain_id=self.domain_id)
+ del user['id']
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body={'user': user})
+ self.assertValidUserResponse(r, user)
+
+ def test_admin_password_reset(self):
+ # bootstrap a user as admin
+ user_ref = self.new_user_ref(domain_id=self.domain['id'])
+ password = user_ref['password']
+ user_ref = self.identity_api.create_user(user_ref)
+
+ # auth as user should work before a password change
+ old_password_auth = self.build_authentication_request(
+ user_id=user_ref['id'],
+ password=password)
+ r = self.v3_authenticate_token(old_password_auth, expected_status=201)
+ old_token = r.headers.get('X-Subject-Token')
+
+ # auth as user with a token should work before a password change
+ old_token_auth = self.build_authentication_request(token=old_token)
+ self.v3_authenticate_token(old_token_auth, expected_status=201)
+
+ # administrative password reset
+ new_password = uuid.uuid4().hex
+ self.patch('/users/%s' % user_ref['id'],
+ body={'user': {'password': new_password}},
+ expected_status=200)
+
+ # auth as user with original password should not work after change
+ self.v3_authenticate_token(old_password_auth, expected_status=401)
+
+ # auth as user with an old token should not work after change
+ self.v3_authenticate_token(old_token_auth, expected_status=404)
+
+ # new password should work
+ new_password_auth = self.build_authentication_request(
+ user_id=user_ref['id'],
+ password=new_password)
+ self.v3_authenticate_token(new_password_auth, expected_status=201)
+
+ def test_update_user_domain_id(self):
+ """Call ``PATCH /users/{user_id}`` with domain_id."""
+ user = self.new_user_ref(domain_id=self.domain['id'])
+ user = self.identity_api.create_user(user)
+ user['domain_id'] = CONF.identity.default_domain_id
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': user['id']},
+ body={'user': user},
+ expected_status=exception.ValidationError.code)
+ self.config_fixture.config(domain_id_immutable=False)
+ user['domain_id'] = self.domain['id']
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': user['id']},
+ body={'user': user})
+ self.assertValidUserResponse(r, user)
+
+ def test_delete_user(self):
+ """Call ``DELETE /users/{user_id}``.
+
+ As well as making sure the delete succeeds, we ensure
+ that any credentials that reference this user are
+ also deleted, while other credentials are unaffected.
+ In addition, no tokens should remain valid for this user.
+
+ """
+ # First check the credential for this user is present
+ r = self.credential_api.get_credential(self.credential['id'])
+ self.assertDictEqual(r, self.credential)
+ # Create a second credential with a different user
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'],
+ project_id=self.project['id'])
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.credential2 = self.new_credential_ref(
+ user_id=self.user2['id'],
+ project_id=self.project['id'])
+ self.credential_api.create_credential(
+ self.credential2['id'],
+ self.credential2)
+ # Create a token for this user which we can check later
+ # gets deleted
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ token = self.get_requested_token(auth_data)
+ # Confirm token is valid for now
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=200)
+
+ # Now delete the user
+ self.delete('/users/%(user_id)s' % {
+ 'user_id': self.user['id']})
+
+ # Deleting the user should have deleted any credentials
+        # that reference this user
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ self.credential['id'])
+        # And no tokens remain valid for this user
+ tokens = self.token_provider_api._persistence._list_tokens(
+ self.user['id'])
+ self.assertEqual(0, len(tokens))
+ # But the credential for user2 is unaffected
+ r = self.credential_api.get_credential(self.credential2['id'])
+ self.assertDictEqual(r, self.credential2)
+
+ # group crud tests
+
+ def test_create_group(self):
+ """Call ``POST /groups``."""
+ ref = self.new_group_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/groups',
+ body={'group': ref})
+ return self.assertValidGroupResponse(r, ref)
+
+ def test_create_group_400(self):
+ """Call ``POST /groups``."""
+ self.post('/groups', body={'group': {}}, expected_status=400)
+
+ def test_list_groups(self):
+ """Call ``GET /groups``."""
+ resource_url = '/groups'
+ r = self.get(resource_url)
+ self.assertValidGroupListResponse(r, ref=self.group,
+ resource_url=resource_url)
+
+ def test_get_group(self):
+ """Call ``GET /groups/{group_id}``."""
+ r = self.get('/groups/%(group_id)s' % {
+ 'group_id': self.group_id})
+ self.assertValidGroupResponse(r, self.group)
+
+ def test_update_group(self):
+ """Call ``PATCH /groups/{group_id}``."""
+ group = self.new_group_ref(domain_id=self.domain_id)
+ del group['id']
+ r = self.patch('/groups/%(group_id)s' % {
+ 'group_id': self.group_id},
+ body={'group': group})
+ self.assertValidGroupResponse(r, group)
+
+ def test_update_group_domain_id(self):
+ """Call ``PATCH /groups/{group_id}`` with domain_id."""
+ group = self.new_group_ref(domain_id=self.domain['id'])
+ group = self.identity_api.create_group(group)
+ group['domain_id'] = CONF.identity.default_domain_id
+ r = self.patch('/groups/%(group_id)s' % {
+ 'group_id': group['id']},
+ body={'group': group},
+ expected_status=exception.ValidationError.code)
+ self.config_fixture.config(domain_id_immutable=False)
+ group['domain_id'] = self.domain['id']
+ r = self.patch('/groups/%(group_id)s' % {
+ 'group_id': group['id']},
+ body={'group': group})
+ self.assertValidGroupResponse(r, group)
+
+ def test_delete_group(self):
+ """Call ``DELETE /groups/{group_id}``."""
+ self.delete('/groups/%(group_id)s' % {
+ 'group_id': self.group_id})
+
+
+class IdentityV3toV2MethodsTestCase(tests.TestCase):
+ """Test users V3 to V2 conversion methods."""
+
+ def setUp(self):
+ super(IdentityV3toV2MethodsTestCase, self).setUp()
+ self.load_backends()
+ self.user_id = uuid.uuid4().hex
+ self.default_project_id = uuid.uuid4().hex
+ self.tenant_id = uuid.uuid4().hex
+ self.domain_id = uuid.uuid4().hex
+ # User with only default_project_id in ref
+ self.user1 = {'id': self.user_id,
+ 'name': self.user_id,
+ 'default_project_id': self.default_project_id,
+ 'domain_id': self.domain_id}
+ # User without default_project_id or tenantId in ref
+ self.user2 = {'id': self.user_id,
+ 'name': self.user_id,
+ 'domain_id': self.domain_id}
+ # User with both tenantId and default_project_id in ref
+ self.user3 = {'id': self.user_id,
+ 'name': self.user_id,
+ 'default_project_id': self.default_project_id,
+ 'tenantId': self.tenant_id,
+ 'domain_id': self.domain_id}
+ # User with only tenantId in ref
+ self.user4 = {'id': self.user_id,
+ 'name': self.user_id,
+ 'tenantId': self.tenant_id,
+ 'domain_id': self.domain_id}
+
+ # Expected result if the user is meant to have a tenantId element
+ self.expected_user = {'id': self.user_id,
+ 'name': self.user_id,
+ 'username': self.user_id,
+ 'tenantId': self.default_project_id}
+
+ # Expected result if the user is not meant to have a tenantId element
+ self.expected_user_no_tenant_id = {'id': self.user_id,
+ 'name': self.user_id,
+ 'username': self.user_id}
+
+ def test_v3_to_v2_user_method(self):
+
+ updated_user1 = controller.V2Controller.v3_to_v2_user(self.user1)
+ self.assertIs(self.user1, updated_user1)
+ self.assertDictEqual(self.user1, self.expected_user)
+ updated_user2 = controller.V2Controller.v3_to_v2_user(self.user2)
+ self.assertIs(self.user2, updated_user2)
+ self.assertDictEqual(self.user2, self.expected_user_no_tenant_id)
+ updated_user3 = controller.V2Controller.v3_to_v2_user(self.user3)
+ self.assertIs(self.user3, updated_user3)
+ self.assertDictEqual(self.user3, self.expected_user)
+ updated_user4 = controller.V2Controller.v3_to_v2_user(self.user4)
+ self.assertIs(self.user4, updated_user4)
+ self.assertDictEqual(self.user4, self.expected_user_no_tenant_id)
+
+ def test_v3_to_v2_user_method_list(self):
+ user_list = [self.user1, self.user2, self.user3, self.user4]
+ updated_list = controller.V2Controller.v3_to_v2_user(user_list)
+
+ self.assertEqual(len(updated_list), len(user_list))
+
+ for i, ref in enumerate(updated_list):
+ # Order should not change.
+ self.assertIs(ref, user_list[i])
+
+ self.assertDictEqual(self.user1, self.expected_user)
+ self.assertDictEqual(self.user2, self.expected_user_no_tenant_id)
+ self.assertDictEqual(self.user3, self.expected_user)
+ self.assertDictEqual(self.user4, self.expected_user_no_tenant_id)
+
+
+class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
+
+ def setUp(self):
+ super(UserSelfServiceChangingPasswordsTestCase, self).setUp()
+ self.user_ref = self.new_user_ref(domain_id=self.domain['id'])
+ password = self.user_ref['password']
+ self.user_ref = self.identity_api.create_user(self.user_ref)
+ self.user_ref['password'] = password
+ self.token = self.get_request_token(self.user_ref['password'], 201)
+
+ def get_request_token(self, password, expected_status):
+ auth_data = self.build_authentication_request(
+ user_id=self.user_ref['id'],
+ password=password)
+ r = self.v3_authenticate_token(auth_data,
+ expected_status=expected_status)
+ return r.headers.get('X-Subject-Token')
+
+ def change_password(self, expected_status, **kwargs):
+ """Returns a test response for a change password request."""
+ return self.post('/users/%s/password' % self.user_ref['id'],
+ body={'user': kwargs},
+ token=self.token,
+ expected_status=expected_status)
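+
+    # NOTE: the body built by change_password() from its keyword arguments
+    # matches the calls in the tests below, i.e.:
+    #
+    #     {'user': {'password': <new>, 'original_password': <current>}}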
+
+ def test_changing_password(self):
+ # original password works
+ token_id = self.get_request_token(self.user_ref['password'],
+ expected_status=201)
+ # original token works
+ old_token_auth = self.build_authentication_request(token=token_id)
+ self.v3_authenticate_token(old_token_auth, expected_status=201)
+
+ # change password
+ new_password = uuid.uuid4().hex
+ self.change_password(password=new_password,
+ original_password=self.user_ref['password'],
+ expected_status=204)
+
+ # old password fails
+ self.get_request_token(self.user_ref['password'], expected_status=401)
+
+ # old token fails
+ self.v3_authenticate_token(old_token_auth, expected_status=404)
+
+ # new password works
+ self.get_request_token(new_password, expected_status=201)
+
+ def test_changing_password_with_missing_original_password_fails(self):
+ r = self.change_password(password=uuid.uuid4().hex,
+ expected_status=400)
+ self.assertThat(r.result['error']['message'],
+ matchers.Contains('original_password'))
+
+ def test_changing_password_with_missing_password_fails(self):
+ r = self.change_password(original_password=self.user_ref['password'],
+ expected_status=400)
+ self.assertThat(r.result['error']['message'],
+ matchers.Contains('password'))
+
+ def test_changing_password_with_incorrect_password_fails(self):
+ self.change_password(password=uuid.uuid4().hex,
+ original_password=uuid.uuid4().hex,
+ expected_status=401)
+
+ def test_changing_password_with_disabled_user_fails(self):
+ # disable the user account
+ self.user_ref['enabled'] = False
+ self.patch('/users/%s' % self.user_ref['id'],
+ body={'user': self.user_ref})
+
+ self.change_password(password=uuid.uuid4().hex,
+ original_password=self.user_ref['password'],
+ expected_status=401)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
new file mode 100644
index 00000000..608162d8
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
@@ -0,0 +1,891 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+from pycadf import cadftaxonomy
+from six.moves import urllib
+
+from keystone.contrib import oauth1
+from keystone.contrib.oauth1 import controllers
+from keystone.contrib.oauth1 import core
+from keystone import exception
+from keystone.tests.unit.common import test_notifications
+from keystone.tests.unit.ksfixtures import temporaryfile
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+
+
+class OAuth1Tests(test_v3.RestfulTestCase):
+
+ EXTENSION_NAME = 'oauth1'
+ EXTENSION_TO_ADD = 'oauth1_extension'
+
+ CONSUMER_URL = '/OS-OAUTH1/consumers'
+
+ def setUp(self):
+ super(OAuth1Tests, self).setUp()
+
+ # Now that the app has been served, we can query CONF values
+ self.base_url = 'http://localhost/v3'
+ self.controller = controllers.OAuthControllerV3()
+
+ def _create_single_consumer(self):
+ ref = {'description': uuid.uuid4().hex}
+ resp = self.post(
+ self.CONSUMER_URL,
+ body={'consumer': ref})
+ return resp.result['consumer']
+
+ def _create_request_token(self, consumer, project_id):
+ endpoint = '/OS-OAUTH1/request_token'
+ client = oauth1.Client(consumer['key'],
+ client_secret=consumer['secret'],
+ signature_method=oauth1.SIG_HMAC,
+ callback_uri="oob")
+ headers = {'requested_project_id': project_id}
+ url, headers, body = client.sign(self.base_url + endpoint,
+ http_method='POST',
+ headers=headers)
+ return endpoint, headers
+
+ def _create_access_token(self, consumer, token):
+ endpoint = '/OS-OAUTH1/access_token'
+ client = oauth1.Client(consumer['key'],
+ client_secret=consumer['secret'],
+ resource_owner_key=token.key,
+ resource_owner_secret=token.secret,
+ signature_method=oauth1.SIG_HMAC,
+ verifier=token.verifier)
+ url, headers, body = client.sign(self.base_url + endpoint,
+ http_method='POST')
+ headers.update({'Content-Type': 'application/json'})
+ return endpoint, headers
+
+ def _get_oauth_token(self, consumer, token):
+ client = oauth1.Client(consumer['key'],
+ client_secret=consumer['secret'],
+ resource_owner_key=token.key,
+ resource_owner_secret=token.secret,
+ signature_method=oauth1.SIG_HMAC)
+ endpoint = '/auth/tokens'
+ url, headers, body = client.sign(self.base_url + endpoint,
+ http_method='POST')
+ headers.update({'Content-Type': 'application/json'})
+ ref = {'auth': {'identity': {'oauth1': {}, 'methods': ['oauth1']}}}
+ return endpoint, headers, ref
+
+ def _authorize_request_token(self, request_id):
+        return '/OS-OAUTH1/authorize/%s' % request_id
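+
+    # NOTE: taken together, the helpers above walk the standard
+    # three-legged OAuth1 flow in the order the tests use them:
+    #
+    #   1. _create_request_token()     POST /OS-OAUTH1/request_token
+    #   2. _authorize_request_token()  PUT  /OS-OAUTH1/authorize/{id}
+    #   3. _create_access_token()      POST /OS-OAUTH1/access_token
+    #   4. _get_oauth_token()          POST /auth/tokens (method 'oauth1')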
+
+
+class ConsumerCRUDTests(OAuth1Tests):
+
+ def _consumer_create(self, description=None, description_flag=True,
+ **kwargs):
+ if description_flag:
+ ref = {'description': description}
+ else:
+ ref = {}
+ if kwargs:
+ ref.update(kwargs)
+ resp = self.post(
+ self.CONSUMER_URL,
+ body={'consumer': ref})
+ consumer = resp.result['consumer']
+ consumer_id = consumer['id']
+ self.assertEqual(description, consumer['description'])
+ self.assertIsNotNone(consumer_id)
+ self.assertIsNotNone(consumer['secret'])
+ return consumer
+
+ def test_consumer_create(self):
+ description = uuid.uuid4().hex
+ self._consumer_create(description=description)
+
+ def test_consumer_create_none_desc_1(self):
+ self._consumer_create()
+
+ def test_consumer_create_none_desc_2(self):
+ self._consumer_create(description_flag=False)
+
+ def test_consumer_create_normalize_field(self):
+        # If a consumer is created with a field that has : or - in its
+        # name, the name is normalized by converting those chars to _.
+ field_name = 'some:weird-field'
+ field_value = uuid.uuid4().hex
+ extra_fields = {field_name: field_value}
+ consumer = self._consumer_create(**extra_fields)
+ normalized_field_name = 'some_weird_field'
+ self.assertEqual(field_value, consumer[normalized_field_name])
+
+ def test_consumer_delete(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ resp = self.delete(self.CONSUMER_URL + '/%s' % consumer_id)
+ self.assertResponseStatus(resp, 204)
+
+ def test_consumer_get(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ resp = self.get(self.CONSUMER_URL + '/%s' % consumer_id)
+ self_url = ['http://localhost/v3', self.CONSUMER_URL,
+ '/', consumer_id]
+ self_url = ''.join(self_url)
+ self.assertEqual(self_url, resp.result['consumer']['links']['self'])
+ self.assertEqual(consumer_id, resp.result['consumer']['id'])
+
+ def test_consumer_list(self):
+ self._consumer_create()
+ resp = self.get(self.CONSUMER_URL)
+ entities = resp.result['consumers']
+ self.assertIsNotNone(entities)
+ self_url = ['http://localhost/v3', self.CONSUMER_URL]
+ self_url = ''.join(self_url)
+ self.assertEqual(self_url, resp.result['links']['self'])
+ self.assertValidListLinks(resp.result['links'])
+
+ def test_consumer_update(self):
+ consumer = self._create_single_consumer()
+ original_id = consumer['id']
+ original_description = consumer['description']
+ update_description = original_description + '_new'
+
+ update_ref = {'description': update_description}
+ update_resp = self.patch(self.CONSUMER_URL + '/%s' % original_id,
+ body={'consumer': update_ref})
+ consumer = update_resp.result['consumer']
+ self.assertEqual(update_description, consumer['description'])
+ self.assertEqual(original_id, consumer['id'])
+
+ def test_consumer_update_bad_secret(self):
+ consumer = self._create_single_consumer()
+ original_id = consumer['id']
+ update_ref = copy.deepcopy(consumer)
+ update_ref['description'] = uuid.uuid4().hex
+ update_ref['secret'] = uuid.uuid4().hex
+ self.patch(self.CONSUMER_URL + '/%s' % original_id,
+ body={'consumer': update_ref},
+ expected_status=400)
+
+ def test_consumer_update_bad_id(self):
+ consumer = self._create_single_consumer()
+ original_id = consumer['id']
+ original_description = consumer['description']
+ update_description = original_description + "_new"
+
+ update_ref = copy.deepcopy(consumer)
+ update_ref['description'] = update_description
+ update_ref['id'] = update_description
+ self.patch(self.CONSUMER_URL + '/%s' % original_id,
+ body={'consumer': update_ref},
+ expected_status=400)
+
+ def test_consumer_update_normalize_field(self):
+        # If a consumer is updated with a field that has : or - in its
+        # name, the name is normalized by converting those chars to _.
+ field1_name = 'some:weird-field'
+ field1_orig_value = uuid.uuid4().hex
+
+ extra_fields = {field1_name: field1_orig_value}
+ consumer = self._consumer_create(**extra_fields)
+ consumer_id = consumer['id']
+
+ field1_new_value = uuid.uuid4().hex
+
+ field2_name = 'weird:some-field'
+ field2_value = uuid.uuid4().hex
+
+ update_ref = {field1_name: field1_new_value,
+ field2_name: field2_value}
+
+ update_resp = self.patch(self.CONSUMER_URL + '/%s' % consumer_id,
+ body={'consumer': update_ref})
+ consumer = update_resp.result['consumer']
+
+ normalized_field1_name = 'some_weird_field'
+ self.assertEqual(field1_new_value, consumer[normalized_field1_name])
+
+ normalized_field2_name = 'weird_some_field'
+ self.assertEqual(field2_value, consumer[normalized_field2_name])
+
+ def test_consumer_create_no_description(self):
+ resp = self.post(self.CONSUMER_URL, body={'consumer': {}})
+ consumer = resp.result['consumer']
+ consumer_id = consumer['id']
+ self.assertIsNone(consumer['description'])
+ self.assertIsNotNone(consumer_id)
+ self.assertIsNotNone(consumer['secret'])
+
+ def test_consumer_get_bad_id(self):
+ self.get(self.CONSUMER_URL + '/%(consumer_id)s'
+ % {'consumer_id': uuid.uuid4().hex},
+ expected_status=404)
+
+
+class OAuthFlowTests(OAuth1Tests):
+
+ def auth_plugin_config_override(self):
+ methods = ['password', 'token', 'oauth1']
+ method_classes = {
+ 'password': 'keystone.auth.plugins.password.Password',
+ 'token': 'keystone.auth.plugins.token.Token',
+ 'oauth1': 'keystone.auth.plugins.oauth1.OAuth',
+ }
+ super(OAuthFlowTests, self).auth_plugin_config_override(
+ methods, **method_classes)
+
+ def test_oauth_flow(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ self.consumer = {'key': consumer_id, 'secret': consumer_secret}
+ self.assertIsNotNone(self.consumer['secret'])
+
+ url, headers = self._create_request_token(self.consumer,
+ self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+ request_secret = credentials['oauth_token_secret'][0]
+ self.request_token = oauth1.Token(request_key, request_secret)
+ self.assertIsNotNone(self.request_token.key)
+
+ url = self._authorize_request_token(request_key)
+ body = {'roles': [{'id': self.role_id}]}
+ resp = self.put(url, body=body, expected_status=200)
+ self.verifier = resp.result['token']['oauth_verifier']
+ self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier))
+ self.assertEqual(8, len(self.verifier))
+
+ self.request_token.set_verifier(self.verifier)
+ url, headers = self._create_access_token(self.consumer,
+ self.request_token)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ access_key = credentials['oauth_token'][0]
+ access_secret = credentials['oauth_token_secret'][0]
+ self.access_token = oauth1.Token(access_key, access_secret)
+ self.assertIsNotNone(self.access_token.key)
+
+ url, headers, body = self._get_oauth_token(self.consumer,
+ self.access_token)
+ content = self.post(url, headers=headers, body=body)
+ self.keystone_token_id = content.headers['X-Subject-Token']
+ self.keystone_token = content.result['token']
+ self.assertIsNotNone(self.keystone_token_id)
+
+
+class AccessTokenCRUDTests(OAuthFlowTests):
+ def test_delete_access_token_dne(self):
+ self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
+ % {'user': self.user_id,
+ 'auth': uuid.uuid4().hex},
+ expected_status=404)
+
+ def test_list_no_access_tokens(self):
+ resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
+ % {'user_id': self.user_id})
+ entities = resp.result['access_tokens']
+ self.assertEqual([], entities)
+ self.assertValidListLinks(resp.result['links'])
+
+ def test_get_single_access_token(self):
+ self.test_oauth_flow()
+ url = '/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s' % {
+ 'user_id': self.user_id,
+ 'key': self.access_token.key
+ }
+ resp = self.get(url)
+ entity = resp.result['access_token']
+ self.assertEqual(self.access_token.key, entity['id'])
+ self.assertEqual(self.consumer['key'], entity['consumer_id'])
+ self.assertEqual('http://localhost/v3' + url, entity['links']['self'])
+
+ def test_get_access_token_dne(self):
+ self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s'
+ % {'user_id': self.user_id,
+ 'key': uuid.uuid4().hex},
+ expected_status=404)
+
+ def test_list_all_roles_in_access_token(self):
+ self.test_oauth_flow()
+ resp = self.get('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles'
+ % {'id': self.user_id,
+ 'key': self.access_token.key})
+ entities = resp.result['roles']
+ self.assertTrue(entities)
+ self.assertValidListLinks(resp.result['links'])
+
+ def test_get_role_in_access_token(self):
+ self.test_oauth_flow()
+ url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
+ % {'id': self.user_id, 'key': self.access_token.key,
+ 'role': self.role_id})
+ resp = self.get(url)
+ entity = resp.result['role']
+ self.assertEqual(self.role_id, entity['id'])
+
+ def test_get_role_in_access_token_dne(self):
+ self.test_oauth_flow()
+ url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
+ % {'id': self.user_id, 'key': self.access_token.key,
+ 'role': uuid.uuid4().hex})
+ self.get(url, expected_status=404)
+
+ def test_list_and_delete_access_tokens(self):
+ self.test_oauth_flow()
+ # List access_tokens should be > 0
+ resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
+ % {'user_id': self.user_id})
+ entities = resp.result['access_tokens']
+ self.assertTrue(entities)
+ self.assertValidListLinks(resp.result['links'])
+
+ # Delete access_token
+ resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
+ % {'user': self.user_id,
+ 'auth': self.access_token.key})
+ self.assertResponseStatus(resp, 204)
+
+ # List access_token should be 0
+ resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
+ % {'user_id': self.user_id})
+ entities = resp.result['access_tokens']
+ self.assertEqual([], entities)
+ self.assertValidListLinks(resp.result['links'])
+
+
+class AuthTokenTests(OAuthFlowTests):
+
+ def test_keystone_token_is_valid(self):
+ self.test_oauth_flow()
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ r = self.get('/auth/tokens', headers=headers)
+ self.assertValidTokenResponse(r, self.user)
+
+ # now verify the oauth section
+ oauth_section = r.result['token']['OS-OAUTH1']
+ self.assertEqual(self.access_token.key,
+ oauth_section['access_token_id'])
+ self.assertEqual(self.consumer['key'], oauth_section['consumer_id'])
+
+ # verify the roles section
+ roles_list = r.result['token']['roles']
+ # we can just verify the 0th role since we are only assigning one role
+ self.assertEqual(self.role_id, roles_list[0]['id'])
+
+ # verify that the token can perform delegated tasks
+ ref = self.new_user_ref(domain_id=self.domain_id)
+ r = self.admin_request(path='/v3/users', headers=headers,
+ method='POST', body={'user': ref})
+ self.assertValidUserResponse(r, ref)
+
+ def test_delete_access_token_also_revokes_token(self):
+ self.test_oauth_flow()
+
+ # Delete access token
+ resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
+ % {'user': self.user_id,
+ 'auth': self.access_token.key})
+ self.assertResponseStatus(resp, 204)
+
+ # Check Keystone Token no longer exists
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ self.get('/auth/tokens', headers=headers,
+ expected_status=404)
+
+ def test_deleting_consumer_also_deletes_tokens(self):
+ self.test_oauth_flow()
+
+ # Delete consumer
+ consumer_id = self.consumer['key']
+ resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s'
+ % {'consumer_id': consumer_id})
+ self.assertResponseStatus(resp, 204)
+
+ # List access_token should be 0
+ resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
+ % {'user_id': self.user_id})
+ entities = resp.result['access_tokens']
+ self.assertEqual([], entities)
+
+ # Check Keystone Token no longer exists
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ self.head('/auth/tokens', headers=headers,
+ expected_status=404)
+
+ def test_change_user_password_also_deletes_tokens(self):
+ self.test_oauth_flow()
+
+ # delegated keystone token exists
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ r = self.get('/auth/tokens', headers=headers)
+ self.assertValidTokenResponse(r, self.user)
+
+ user = {'password': uuid.uuid4().hex}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body={'user': user})
+
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ self.admin_request(path='/auth/tokens', headers=headers,
+ method='GET', expected_status=404)
+
+ def test_deleting_project_also_invalidates_tokens(self):
+ self.test_oauth_flow()
+
+ # delegated keystone token exists
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ r = self.get('/auth/tokens', headers=headers)
+ self.assertValidTokenResponse(r, self.user)
+
+ r = self.delete('/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+
+ headers = {'X-Subject-Token': self.keystone_token_id,
+ 'X-Auth-Token': self.keystone_token_id}
+ self.admin_request(path='/auth/tokens', headers=headers,
+ method='GET', expected_status=404)
+
+ def test_token_chaining_is_not_allowed(self):
+ self.test_oauth_flow()
+
+ # attempt to re-authenticate (token chain) with the given token
+ path = '/v3/auth/tokens/'
+ auth_data = self.build_authentication_request(
+ token=self.keystone_token_id)
+
+ self.admin_request(
+ path=path,
+ body=auth_data,
+ token=self.keystone_token_id,
+ method='POST',
+ expected_status=403)
+
+ def test_delete_keystone_tokens_by_consumer_id(self):
+ self.test_oauth_flow()
+ self.token_provider_api._persistence.get_token(self.keystone_token_id)
+ self.token_provider_api._persistence.delete_tokens(
+ self.user_id,
+ consumer_id=self.consumer['key'])
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ self.keystone_token_id)
+
+ def _create_trust_get_token(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ trust_id=trust['id'])
+
+ return self.get_requested_token(auth_data)
+
+ def _approve_request_token_url(self):
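+ # Helper: create a consumer, obtain a request token for the test
+ # project, and return the URL used to authorize that token.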
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ self.consumer = {'key': consumer_id, 'secret': consumer_secret}
+ self.assertIsNotNone(self.consumer['secret'])
+
+ url, headers = self._create_request_token(self.consumer,
+ self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+ request_secret = credentials['oauth_token_secret'][0]
+ self.request_token = oauth1.Token(request_key, request_secret)
+ self.assertIsNotNone(self.request_token.key)
+
+ url = self._authorize_request_token(request_key)
+
+ return url
+
+ def test_oauth_token_cannot_create_new_trust(self):
+ self.test_oauth_flow()
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ token=self.keystone_token_id,
+ expected_status=403)
+
+ def test_oauth_token_cannot_authorize_request_token(self):
+ self.test_oauth_flow()
+ url = self._approve_request_token_url()
+ body = {'roles': [{'id': self.role_id}]}
+ self.put(url, body=body, token=self.keystone_token_id,
+ expected_status=403)
+
+ def test_oauth_token_cannot_list_request_tokens(self):
+ self._set_policy({"identity:list_access_tokens": [],
+ "identity:create_consumer": [],
+ "identity:authorize_request_token": []})
+ self.test_oauth_flow()
+ url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id
+ self.get(url, token=self.keystone_token_id,
+ expected_status=403)
+
+ def _set_policy(self, new_policy):
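+ # Write the given policy dict to a secure temp file and point
+ # oslo.policy at it, so the test runs under that policy.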
+ self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
+ self.tmpfilename = self.tempfile.file_name
+ self.config_fixture.config(group='oslo_policy',
+ policy_file=self.tmpfilename)
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write(jsonutils.dumps(new_policy))
+
+ def test_trust_token_cannot_authorize_request_token(self):
+ trust_token = self._create_trust_get_token()
+ url = self._approve_request_token_url()
+ body = {'roles': [{'id': self.role_id}]}
+ self.put(url, body=body, token=trust_token, expected_status=403)
+
+ def test_trust_token_cannot_list_request_tokens(self):
+ self._set_policy({"identity:list_access_tokens": [],
+ "identity:create_trust": []})
+ trust_token = self._create_trust_get_token()
+ url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id
+ self.get(url, token=trust_token, expected_status=403)
+
+
+class MaliciousOAuth1Tests(OAuth1Tests):
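+ """Tamper with one element of the OAuth1 handshake and verify rejection."""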
+
+ def test_bad_consumer_secret(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer = {'key': consumer_id, 'secret': uuid.uuid4().hex}
+ url, headers = self._create_request_token(consumer, self.project_id)
+ self.post(url, headers=headers, expected_status=401)
+
+ def test_bad_request_token_key(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ consumer = {'key': consumer_id, 'secret': consumer_secret}
+ url, headers = self._create_request_token(consumer, self.project_id)
+ self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ url = self._authorize_request_token(uuid.uuid4().hex)
+ body = {'roles': [{'id': self.role_id}]}
+ self.put(url, body=body, expected_status=404)
+
+ def test_bad_consumer_id(self):
+ consumer = self._create_single_consumer()
+ consumer_id = uuid.uuid4().hex
+ consumer_secret = consumer['secret']
+ consumer = {'key': consumer_id, 'secret': consumer_secret}
+ url, headers = self._create_request_token(consumer, self.project_id)
+ self.post(url, headers=headers, expected_status=404)
+
+ def test_bad_requested_project_id(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ consumer = {'key': consumer_id, 'secret': consumer_secret}
+ project_id = uuid.uuid4().hex
+ url, headers = self._create_request_token(consumer, project_id)
+ self.post(url, headers=headers, expected_status=404)
+
+ def test_bad_verifier(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ consumer = {'key': consumer_id, 'secret': consumer_secret}
+
+ url, headers = self._create_request_token(consumer, self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+ request_secret = credentials['oauth_token_secret'][0]
+ request_token = oauth1.Token(request_key, request_secret)
+
+ url = self._authorize_request_token(request_key)
+ body = {'roles': [{'id': self.role_id}]}
+ resp = self.put(url, body=body, expected_status=200)
+ verifier = resp.result['token']['oauth_verifier']
+ self.assertIsNotNone(verifier)
+
+ request_token.set_verifier(uuid.uuid4().hex)
+ url, headers = self._create_access_token(consumer, request_token)
+ self.post(url, headers=headers, expected_status=401)
+
+ def test_bad_authorizing_roles(self):
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ consumer = {'key': consumer_id, 'secret': consumer_secret}
+
+ url, headers = self._create_request_token(consumer, self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+
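+ # Remove the user's only role on the project before authorizing;
+ # the role named in the body should no longer match any of the
+ # user's assignments, so the PUT should fail with 404.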
+ self.assignment_api.remove_role_from_user_and_project(
+ self.user_id, self.project_id, self.role_id)
+ url = self._authorize_request_token(request_key)
+ body = {'roles': [{'id': self.role_id}]}
+ self.admin_request(path=url, method='PUT',
+ body=body, expected_status=404)
+
+ def test_expired_authorizing_request_token(self):
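+ # A negative request_token_duration makes any new request token
+ # expire immediately, so authorizing it should fail with 401.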
+ self.config_fixture.config(group='oauth1', request_token_duration=-1)
+
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ self.consumer = {'key': consumer_id, 'secret': consumer_secret}
+ self.assertIsNotNone(self.consumer['key'])
+
+ url, headers = self._create_request_token(self.consumer,
+ self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+ request_secret = credentials['oauth_token_secret'][0]
+ self.request_token = oauth1.Token(request_key, request_secret)
+ self.assertIsNotNone(self.request_token.key)
+
+ url = self._authorize_request_token(request_key)
+ body = {'roles': [{'id': self.role_id}]}
+ self.put(url, body=body, expected_status=401)
+
+ def test_expired_creating_keystone_token(self):
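+ # A negative access_token_duration makes the access token expire
+ # immediately, so exchanging it for a Keystone token should 401.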
+ self.config_fixture.config(group='oauth1', access_token_duration=-1)
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ self.consumer = {'key': consumer_id, 'secret': consumer_secret}
+ self.assertIsNotNone(self.consumer['key'])
+
+ url, headers = self._create_request_token(self.consumer,
+ self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+ request_secret = credentials['oauth_token_secret'][0]
+ self.request_token = oauth1.Token(request_key, request_secret)
+ self.assertIsNotNone(self.request_token.key)
+
+ url = self._authorize_request_token(request_key)
+ body = {'roles': [{'id': self.role_id}]}
+ resp = self.put(url, body=body, expected_status=200)
+ self.verifier = resp.result['token']['oauth_verifier']
+
+ self.request_token.set_verifier(self.verifier)
+ url, headers = self._create_access_token(self.consumer,
+ self.request_token)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ access_key = credentials['oauth_token'][0]
+ access_secret = credentials['oauth_token_secret'][0]
+ self.access_token = oauth1.Token(access_key, access_secret)
+ self.assertIsNotNone(self.access_token.key)
+
+ url, headers, body = self._get_oauth_token(self.consumer,
+ self.access_token)
+ self.post(url, headers=headers, body=body, expected_status=401)
+
+ def test_missing_oauth_headers(self):
+ endpoint = '/OS-OAUTH1/request_token'
+ client = oauth1.Client(uuid.uuid4().hex,
+ client_secret=uuid.uuid4().hex,
+ signature_method=oauth1.SIG_HMAC,
+ callback_uri="oob")
+ headers = {'requested_project_id': uuid.uuid4().hex}
+ _url, headers, _body = client.sign(self.base_url + endpoint,
+ http_method='POST',
+ headers=headers)
+
+ # NOTE(stevemar): To simulate this error, we remove the Authorization
+ # header from the post request.
+ del headers['Authorization']
+ self.post(endpoint, headers=headers, expected_status=500)
+
+
+class OAuthNotificationTests(OAuth1Tests,
+ test_notifications.BaseNotificationTest):
+
+ def test_create_consumer(self):
+ consumer_ref = self._create_single_consumer()
+ self._assert_notify_sent(consumer_ref['id'],
+ test_notifications.CREATED_OPERATION,
+ 'OS-OAUTH1:consumer')
+ self._assert_last_audit(consumer_ref['id'],
+ test_notifications.CREATED_OPERATION,
+ 'OS-OAUTH1:consumer',
+ cadftaxonomy.SECURITY_ACCOUNT)
+
+ def test_update_consumer(self):
+ consumer_ref = self._create_single_consumer()
+ update_ref = {'consumer': {'description': uuid.uuid4().hex}}
+ self.oauth_api.update_consumer(consumer_ref['id'], update_ref)
+ self._assert_notify_sent(consumer_ref['id'],
+ test_notifications.UPDATED_OPERATION,
+ 'OS-OAUTH1:consumer')
+ self._assert_last_audit(consumer_ref['id'],
+ test_notifications.UPDATED_OPERATION,
+ 'OS-OAUTH1:consumer',
+ cadftaxonomy.SECURITY_ACCOUNT)
+
+ def test_delete_consumer(self):
+ consumer_ref = self._create_single_consumer()
+ self.oauth_api.delete_consumer(consumer_ref['id'])
+ self._assert_notify_sent(consumer_ref['id'],
+ test_notifications.DELETED_OPERATION,
+ 'OS-OAUTH1:consumer')
+ self._assert_last_audit(consumer_ref['id'],
+ test_notifications.DELETED_OPERATION,
+ 'OS-OAUTH1:consumer',
+ cadftaxonomy.SECURITY_ACCOUNT)
+
+ def test_oauth_flow_notifications(self):
+ """Test to ensure notifications are sent for oauth tokens
+
+ This test is very similar to test_oauth_flow, however
+ there are additional checks in this test for ensuring that
+ notifications for request token creation, and access token
+ creation/deletion are emitted.
+ """
+
+ consumer = self._create_single_consumer()
+ consumer_id = consumer['id']
+ consumer_secret = consumer['secret']
+ self.consumer = {'key': consumer_id, 'secret': consumer_secret}
+ self.assertIsNotNone(self.consumer['secret'])
+
+ url, headers = self._create_request_token(self.consumer,
+ self.project_id)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ request_key = credentials['oauth_token'][0]
+ request_secret = credentials['oauth_token_secret'][0]
+ self.request_token = oauth1.Token(request_key, request_secret)
+ self.assertIsNotNone(self.request_token.key)
+
+ # Test to ensure the create request token notification is sent
+ self._assert_notify_sent(request_key,
+ test_notifications.CREATED_OPERATION,
+ 'OS-OAUTH1:request_token')
+ self._assert_last_audit(request_key,
+ test_notifications.CREATED_OPERATION,
+ 'OS-OAUTH1:request_token',
+ cadftaxonomy.SECURITY_CREDENTIAL)
+
+ url = self._authorize_request_token(request_key)
+ body = {'roles': [{'id': self.role_id}]}
+ resp = self.put(url, body=body, expected_status=200)
+ self.verifier = resp.result['token']['oauth_verifier']
+ self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier))
+ self.assertEqual(8, len(self.verifier))
+
+ self.request_token.set_verifier(self.verifier)
+ url, headers = self._create_access_token(self.consumer,
+ self.request_token)
+ content = self.post(
+ url, headers=headers,
+ response_content_type='application/x-www-urlformencoded')
+ credentials = urllib.parse.parse_qs(content.result)
+ access_key = credentials['oauth_token'][0]
+ access_secret = credentials['oauth_token_secret'][0]
+ self.access_token = oauth1.Token(access_key, access_secret)
+ self.assertIsNotNone(self.access_token.key)
+
+ # Test to ensure the create access token notification is sent
+ self._assert_notify_sent(access_key,
+ test_notifications.CREATED_OPERATION,
+ 'OS-OAUTH1:access_token')
+ self._assert_last_audit(access_key,
+ test_notifications.CREATED_OPERATION,
+ 'OS-OAUTH1:access_token',
+ cadftaxonomy.SECURITY_CREDENTIAL)
+
+ resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
+ % {'user': self.user_id,
+ 'auth': self.access_token.key})
+ self.assertResponseStatus(resp, 204)
+
+ # Test to ensure the delete access token notification is sent
+ self._assert_notify_sent(access_key,
+ test_notifications.DELETED_OPERATION,
+ 'OS-OAUTH1:access_token')
+ self._assert_last_audit(access_key,
+ test_notifications.DELETED_OPERATION,
+ 'OS-OAUTH1:access_token',
+ cadftaxonomy.SECURITY_CREDENTIAL)
+
+
+class OAuthCADFNotificationTests(OAuthNotificationTests):
+
+ def setUp(self):
+ """Repeat the tests for CADF notifications """
+ super(OAuthCADFNotificationTests, self).setUp()
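+ # Switching the notification format to 'cadf' makes the inherited
+ # OAuthNotificationTests run against CADF-formatted payloads.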
+ self.config_fixture.config(notification_format='cadf')
+
+
+class JsonHomeTests(OAuth1Tests, test_v3.JsonHomeTestMixin):
+ JSON_HOME_DATA = {
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-OAUTH1/1.0/'
+ 'rel/consumers': {
+ 'href': '/OS-OAUTH1/consumers',
+ },
+ }
diff --git a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
new file mode 100644
index 00000000..5710d973
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
@@ -0,0 +1,135 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from oslo_utils import timeutils
+import six
+from testtools import matchers
+
+from keystone.contrib.revoke import model
+from keystone.tests.unit import test_v3
+from keystone.token import provider
+
+
+def _future_time_string():
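+ # Return an ISO 8601 timestamp ~1000 seconds in the future, used to
+ # query for events "since" a time later than any recorded event.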
+ expire_delta = datetime.timedelta(seconds=1000)
+ future_time = timeutils.utcnow() + expire_delta
+ return timeutils.isotime(future_time)
+
+
+class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
+ EXTENSION_NAME = 'revoke'
+ EXTENSION_TO_ADD = 'revoke_extension'
+
+ JSON_HOME_DATA = {
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/'
+ 'rel/events': {
+ 'href': '/OS-REVOKE/events',
+ },
+ }
+
+ def test_get_empty_list(self):
+ resp = self.get('/OS-REVOKE/events')
+ self.assertEqual([], resp.json_body['events'])
+
+ def _blank_event(self):
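+ # Revocation events are reported with only the attributes that were
+ # set, so tests start from an empty dict and add expected fields.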
+ return {}
+
+ # The two values will be the same with the exception of
+ # 'issued_before' which is set when the event is recorded.
+ def assertReportedEventMatchesRecorded(self, event, sample, before_time):
+ after_time = timeutils.utcnow()
+ event_issued_before = timeutils.normalize_time(
+ timeutils.parse_isotime(event['issued_before']))
+ self.assertTrue(
+ before_time <= event_issued_before,
+ 'invalid event issued_before time; %s is not later than %s.' % (
+ timeutils.isotime(event_issued_before, subsecond=True),
+ timeutils.isotime(before_time, subsecond=True)))
+ self.assertTrue(
+ event_issued_before <= after_time,
+ 'invalid event issued_before time; %s is not earlier than %s.' % (
+ timeutils.isotime(event_issued_before, subsecond=True),
+ timeutils.isotime(after_time, subsecond=True)))
+ del (event['issued_before'])
+ self.assertEqual(sample, event)
+
+ def test_revoked_list_self_url(self):
+ revoked_list_url = '/OS-REVOKE/events'
+ resp = self.get(revoked_list_url)
+ links = resp.json_body['links']
+ self.assertThat(links['self'], matchers.EndsWith(revoked_list_url))
+
+ def test_revoked_token_in_list(self):
+ user_id = uuid.uuid4().hex
+ expires_at = provider.default_expire_time()
+ sample = self._blank_event()
+ sample['user_id'] = six.text_type(user_id)
+ sample['expires_at'] = six.text_type(timeutils.isotime(expires_at))
+ before_time = timeutils.utcnow()
+ self.revoke_api.revoke_by_expiration(user_id, expires_at)
+ resp = self.get('/OS-REVOKE/events')
+ events = resp.json_body['events']
+ self.assertEqual(1, len(events))
+ self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
+
+ def test_disabled_project_in_list(self):
+ project_id = uuid.uuid4().hex
+ sample = dict()
+ sample['project_id'] = six.text_type(project_id)
+ before_time = timeutils.utcnow()
+ self.revoke_api.revoke(
+ model.RevokeEvent(project_id=project_id))
+
+ resp = self.get('/OS-REVOKE/events')
+ events = resp.json_body['events']
+ self.assertEqual(1, len(events))
+ self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
+
+ def test_disabled_domain_in_list(self):
+ domain_id = uuid.uuid4().hex
+ sample = dict()
+ sample['domain_id'] = six.text_type(domain_id)
+ before_time = timeutils.utcnow()
+ self.revoke_api.revoke(
+ model.RevokeEvent(domain_id=domain_id))
+
+ resp = self.get('/OS-REVOKE/events')
+ events = resp.json_body['events']
+ self.assertEqual(1, len(events))
+ self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
+
+ def test_list_since_invalid(self):
+ self.get('/OS-REVOKE/events?since=blah', expected_status=400)
+
+ def test_list_since_valid(self):
+ resp = self.get('/OS-REVOKE/events?since=2013-02-27T18:30:59.999999Z')
+ events = resp.json_body['events']
+ self.assertEqual(0, len(events))
+
+ def test_since_future_time_no_events(self):
+ domain_id = uuid.uuid4().hex
+ sample = dict()
+ sample['domain_id'] = six.text_type(domain_id)
+
+ self.revoke_api.revoke(
+ model.RevokeEvent(domain_id=domain_id))
+
+ resp = self.get('/OS-REVOKE/events')
+ events = resp.json_body['events']
+ self.assertEqual(1, len(events))
+
+ resp = self.get('/OS-REVOKE/events?since=%s' % _future_time_string())
+ events = resp.json_body['events']
+ self.assertEqual([], events)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_policy.py b/keystone-moon/keystone/tests/unit/test_v3_policy.py
new file mode 100644
index 00000000..538fc565
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_policy.py
@@ -0,0 +1,68 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.tests.unit import test_v3
+
+
+class PolicyTestCase(test_v3.RestfulTestCase):
+ """Test policy CRUD."""
+
+ def setUp(self):
+ super(PolicyTestCase, self).setUp()
+ self.policy_id = uuid.uuid4().hex
+ self.policy = self.new_policy_ref()
+ self.policy['id'] = self.policy_id
+ self.policy_api.create_policy(
+ self.policy_id,
+ self.policy.copy())
+
+ # policy crud tests
+
+ def test_create_policy(self):
+ """Call ``POST /policies``."""
+ ref = self.new_policy_ref()
+ r = self.post(
+ '/policies',
+ body={'policy': ref})
+ return self.assertValidPolicyResponse(r, ref)
+
+ def test_list_policies(self):
+ """Call ``GET /policies``."""
+ r = self.get('/policies')
+ self.assertValidPolicyListResponse(r, ref=self.policy)
+
+ def test_get_policy(self):
+ """Call ``GET /policies/{policy_id}``."""
+ r = self.get(
+ '/policies/%(policy_id)s' % {
+ 'policy_id': self.policy_id})
+ self.assertValidPolicyResponse(r, self.policy)
+
+ def test_update_policy(self):
+ """Call ``PATCH /policies/{policy_id}``."""
+ policy = self.new_policy_ref()
+ policy['id'] = self.policy_id
+ r = self.patch(
+ '/policies/%(policy_id)s' % {
+ 'policy_id': self.policy_id},
+ body={'policy': policy})
+ self.assertValidPolicyResponse(r, policy)
+
+ def test_delete_policy(self):
+ """Call ``DELETE /policies/{policy_id}``."""
+ self.delete(
+ '/policies/%(policy_id)s' % {
+ 'policy_id': self.policy_id})
diff --git a/keystone-moon/keystone/tests/unit/test_v3_protection.py b/keystone-moon/keystone/tests/unit/test_v3_protection.py
new file mode 100644
index 00000000..2b2c96d1
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_protection.py
@@ -0,0 +1,1170 @@
+# Copyright 2012 OpenStack Foundation
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+
+from keystone import exception
+from keystone.policy.backends import rules
+from keystone.tests import unit as tests
+from keystone.tests.unit.ksfixtures import temporaryfile
+from keystone.tests.unit import test_v3
+
+
+CONF = cfg.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class IdentityTestProtectedCase(test_v3.RestfulTestCase):
+ """Test policy enforcement on the v3 Identity API."""
+
+ def setUp(self):
+ """Setup for Identity Protection Test Cases.
+
+ As well as the usual housekeeping, create a set of domains,
+ users, groups and roles for the subsequent tests:
+
+ - Three domains: A, B and C; C is disabled.
+ - DomainA has user1, DomainB has user2 and user3
+ - DomainA has group1 and group2, DomainB has group3
+ - User1 has two roles on DomainA
+ - User2 has one role on DomainA
+
+ Remember that there will also be a fourth domain in existence,
+ the default domain.
+
+ """
+ # Ensure that test_v3.RestfulTestCase doesn't load its own
+ # sample data, which would make checking the results of our
+ # tests harder
+ super(IdentityTestProtectedCase, self).setUp()
+ self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
+ self.tmpfilename = self.tempfile.file_name
+ self.config_fixture.config(group='oslo_policy',
+ policy_file=self.tmpfilename)
+
+ # A default auth request we can use - un-scoped user token
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'])
+
+ def load_sample_data(self):
+ self._populate_default_domain()
+ # Start by creating a couple of domains
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'], self.domainB)
+ self.domainC = self.new_domain_ref()
+ self.domainC['enabled'] = False
+ self.resource_api.create_domain(self.domainC['id'], self.domainC)
+
+ # Now create some users, one in domainA and two of them in domainB
+ self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
+ password = uuid.uuid4().hex
+ self.user1['password'] = password
+ self.user1 = self.identity_api.create_user(self.user1)
+ self.user1['password'] = password
+
+ self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
+ password = uuid.uuid4().hex
+ self.user2['password'] = password
+ self.user2 = self.identity_api.create_user(self.user2)
+ self.user2['password'] = password
+
+ self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
+ password = uuid.uuid4().hex
+ self.user3['password'] = password
+ self.user3 = self.identity_api.create_user(self.user3)
+ self.user3['password'] = password
+
+ self.group1 = self.new_group_ref(domain_id=self.domainA['id'])
+ self.group1 = self.identity_api.create_group(self.group1)
+
+ self.group2 = self.new_group_ref(domain_id=self.domainA['id'])
+ self.group2 = self.identity_api.create_group(self.group2)
+
+ self.group3 = self.new_group_ref(domain_id=self.domainB['id'])
+ self.group3 = self.identity_api.create_group(self.group3)
+
+ self.role = self.new_role_ref()
+ self.role_api.create_role(self.role['id'], self.role)
+ self.role1 = self.new_role_ref()
+ self.role_api.create_role(self.role1['id'], self.role1)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user1['id'],
+ domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user2['id'],
+ domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.role1['id'],
+ user_id=self.user1['id'],
+ domain_id=self.domainA['id'])
+
+ def _get_id_list_from_ref_list(self, ref_list):
+ result_list = []
+ for x in ref_list:
+ result_list.append(x['id'])
+ return result_list
+
+ def _set_policy(self, new_policy):
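+ # Overwrite the temp policy file configured in setUp so the
+ # remainder of the test runs under the given policy rules.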
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write(jsonutils.dumps(new_policy))
+
+ def test_list_users_unprotected(self):
+ """GET /users (unprotected)
+
+ Test Plan:
+
+ - Update policy so api is unprotected
+ - Use an un-scoped token to make sure we can get back all
+ the users independent of domain
+
+ """
+ self._set_policy({"identity:list_users": []})
+ r = self.get('/users', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertIn(self.user1['id'], id_list)
+ self.assertIn(self.user2['id'], id_list)
+ self.assertIn(self.user3['id'], id_list)
+
+ def test_list_users_filtered_by_domain(self):
+ """GET /users?domain_id=mydomain (filtered)
+
+ Test Plan:
+
+ - Update policy so api is unprotected
+ - Use an un-scoped token to make sure we can filter the
+ users by domainB, getting back the 2 users in that domain
+
+ """
+ self._set_policy({"identity:list_users": []})
+ url_by_name = '/users?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should get back two users, those in DomainB
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertIn(self.user2['id'], id_list)
+ self.assertIn(self.user3['id'], id_list)
+
+ def test_get_user_protected_match_id(self):
+ """GET /users/{id} (match payload)
+
+ Test Plan:
+
+ - Update policy to protect api by user_id
+ - List users with user_id of user1 as filter, to check that
+ this will correctly match user_id in the flattened
+ payload
+
+ """
+ # TODO(henry-nash, ayoung): It would be good to expand this
+ # test for further test flattening, e.g. protect on, say, an
+ # attribute of an object being created
+ new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]}
+ self._set_policy(new_policy)
+ url_by_name = '/users/%s' % self.user1['id']
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(self.user1['id'], r.result['user']['id'])
+
+ def test_get_user_protected_match_target(self):
+ """GET /users/{id} (match target)
+
+ Test Plan:
+
+ - Update policy to protect api by domain_id
+ - Try and read a user who is in DomainB with a token scoped
+ to Domain A - this should fail
+ - Retry this for a user who is in Domain A, which should succeed.
+ - Finally, try getting a user that does not exist, which should
+ still return UserNotFound
+
+ """
+ new_policy = {'identity:get_user':
+ [["domain_id:%(target.user.domain_id)s"]]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/users/%s' % self.user2['id']
+ r = self.get(url_by_name, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ url_by_name = '/users/%s' % self.user1['id']
+ r = self.get(url_by_name, auth=self.auth)
+ self.assertEqual(self.user1['id'], r.result['user']['id'])
+
+ url_by_name = '/users/%s' % uuid.uuid4().hex
+ r = self.get(url_by_name, auth=self.auth,
+ expected_status=exception.UserNotFound.code)
+
+ def test_revoke_grant_protected_match_target(self):
+ """DELETE /domains/{id}/users/{id}/roles/{id} (match target)
+
+ Test Plan:
+
+ - Update policy to protect api by domain_id of entities in
+ the grant
+ - Try and delete the existing grant that has a user who is
+ from a different domain - this should fail.
+ - Retry this for a user who is in Domain A, which should succeed.
+
+ """
+ new_policy = {'identity:revoke_grant':
+ [["domain_id:%(target.user.domain_id)s"]]}
+ self._set_policy(new_policy)
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domainA['id'],
+ 'user_id': self.user2['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role['id']}
+
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ self.delete(member_url, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domainA['id'],
+ 'user_id': self.user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role1['id']}
+ self.delete(member_url, auth=self.auth)
+
+ def test_list_users_protected_by_domain(self):
+ """GET /users?domain_id=mydomain (protected)
+
+ Test Plan:
+
+ - Update policy to protect api by domain_id
+ - List users using a token scoped to domainA with a filter
+ specifying domainA - we should only get back the one user
+ that is in domainA.
+ - Try and read the users from domainB - this should fail since
+ we don't have a token scoped for domainB
+
+ """
+ new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/users?domain_id=%s' % self.domainA['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should only get back one user, the one in DomainA
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertEqual(1, len(id_list))
+ self.assertIn(self.user1['id'], id_list)
+
+ # Now try for domainB, which should fail
+ url_by_name = '/users?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_list_groups_protected_by_domain(self):
+ """GET /groups?domain_id=mydomain (protected)
+
+ Test Plan:
+
+ - Update policy to protect api by domain_id
+ - List groups using a token scoped to domainA and make sure
+ we only get back the two groups that are in domainA
+ - Try and read the groups from domainB - this should fail since
+ we don't have a token scoped for domainB
+
+ """
+ new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/groups?domain_id=%s' % self.domainA['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should only get back two groups, the ones in DomainA
+ id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
+ self.assertEqual(2, len(id_list))
+ self.assertIn(self.group1['id'], id_list)
+ self.assertIn(self.group2['id'], id_list)
+
+ # Now try for domainB, which should fail
+ url_by_name = '/groups?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_list_groups_protected_by_domain_and_filtered(self):
+ """GET /groups?domain_id=mydomain&name=myname (protected)
+
+ Test Plan:
+
+ - Update policy to protect api by domain_id
+ - List groups using a token scoped to domainA with a filter
+ specifying both domainA and the name of group.
+ - We should only get back the group in domainA that matches
+ the name
+
+ """
+ new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/groups?domain_id=%s&name=%s' % (
+ self.domainA['id'], self.group2['name'])
+ r = self.get(url_by_name, auth=self.auth)
+ # We should only get back one group, the one in DomainA that
+ # matches the name supplied
+ id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
+ self.assertEqual(1, len(id_list))
+ self.assertIn(self.group2['id'], id_list)
+
+
+class IdentityTestPolicySample(test_v3.RestfulTestCase):
+ """Test policy enforcement of the policy.json file."""
+
+ def load_sample_data(self):
+ self._populate_default_domain()
+
+ self.just_a_user = self.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+ password = uuid.uuid4().hex
+ self.just_a_user['password'] = password
+ self.just_a_user = self.identity_api.create_user(self.just_a_user)
+ self.just_a_user['password'] = password
+
+ self.another_user = self.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+ password = uuid.uuid4().hex
+ self.another_user['password'] = password
+ self.another_user = self.identity_api.create_user(self.another_user)
+ self.another_user['password'] = password
+
+ self.admin_user = self.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+ password = uuid.uuid4().hex
+ self.admin_user['password'] = password
+ self.admin_user = self.identity_api.create_user(self.admin_user)
+ self.admin_user['password'] = password
+
+ self.role = self.new_role_ref()
+ self.role_api.create_role(self.role['id'], self.role)
+ self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
+ self.role_api.create_role(self.admin_role['id'], self.admin_role)
+
+ # Create and assign roles to the project
+ self.project = self.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(self.project['id'], self.project)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.just_a_user['id'],
+ project_id=self.project['id'])
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.another_user['id'],
+ project_id=self.project['id'])
+ self.assignment_api.create_grant(self.admin_role['id'],
+ user_id=self.admin_user['id'],
+ project_id=self.project['id'])
+
+ def test_user_validate_same_token(self):
+ # Given a non-admin user token, the token can be used to validate
+ # itself.
+ # This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
+ # FIXME(blk-u): This test fails, a user can't validate their own token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token = self.get_requested_token(auth)
+
+ # FIXME(blk-u): remove expected_status=403.
+ self.get('/auth/tokens', token=token,
+ headers={'X-Subject-Token': token}, expected_status=403)
+
+ def test_user_validate_user_token(self):
+ # A user can validate one of their own tokens.
+ # This is GET /v3/auth/tokens
+ # FIXME(blk-u): This test fails, a user can't validate their own token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token1 = self.get_requested_token(auth)
+ token2 = self.get_requested_token(auth)
+
+ # FIXME(blk-u): remove expected_status=403.
+ self.get('/auth/tokens', token=token1,
+ headers={'X-Subject-Token': token2}, expected_status=403)
+
+ def test_user_validate_other_user_token_rejected(self):
+ # A user cannot validate another user's token.
+ # This is GET /v3/auth/tokens
+
+ user1_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user1_token = self.get_requested_token(user1_auth)
+
+ user2_auth = self.build_authentication_request(
+ user_id=self.another_user['id'],
+ password=self.another_user['password'])
+ user2_token = self.get_requested_token(user2_auth)
+
+ self.get('/auth/tokens', token=user1_token,
+ headers={'X-Subject-Token': user2_token}, expected_status=403)
+
+ def test_admin_validate_user_token(self):
+ # An admin can validate a user's token.
+ # This is GET /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.admin_user['id'],
+ password=self.admin_user['password'],
+ project_id=self.project['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.get('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token})
+
+ def test_user_check_same_token(self):
+ # Given a non-admin user token, the token can be used to check
+ # itself.
+ # This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
+ # FIXME(blk-u): This test fails, a user can't check the same token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token = self.get_requested_token(auth)
+
+ # FIXME(blk-u): change to expected_status=200
+ self.head('/auth/tokens', token=token,
+ headers={'X-Subject-Token': token}, expected_status=403)
+
+ def test_user_check_user_token(self):
+ # A user can check one of their own tokens.
+ # This is HEAD /v3/auth/tokens
+ # FIXME(blk-u): This test fails, a user can't check the same token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token1 = self.get_requested_token(auth)
+ token2 = self.get_requested_token(auth)
+
+ # FIXME(blk-u): change to expected_status=200
+ self.head('/auth/tokens', token=token1,
+ headers={'X-Subject-Token': token2}, expected_status=403)
+
+ def test_user_check_other_user_token_rejected(self):
+ # A user cannot check another user's token.
+ # This is HEAD /v3/auth/tokens
+
+ user1_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user1_token = self.get_requested_token(user1_auth)
+
+ user2_auth = self.build_authentication_request(
+ user_id=self.another_user['id'],
+ password=self.another_user['password'])
+ user2_token = self.get_requested_token(user2_auth)
+
+ self.head('/auth/tokens', token=user1_token,
+ headers={'X-Subject-Token': user2_token},
+ expected_status=403)
+
+ def test_admin_check_user_token(self):
+ # An admin can check a user's token.
+ # This is HEAD /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.admin_user['id'],
+ password=self.admin_user['password'],
+ project_id=self.project['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.head('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token}, expected_status=200)
+
+ def test_user_revoke_same_token(self):
+ # Given a non-admin user token, the token can be used to revoke
+ # itself.
+ # This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
+ # FIXME(blk-u): This test fails, a user can't revoke the same token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token = self.get_requested_token(auth)
+
+ # FIXME(blk-u): remove expected_status=403
+ self.delete('/auth/tokens', token=token,
+ headers={'X-Subject-Token': token}, expected_status=403)
+
+ def test_user_revoke_user_token(self):
+ # A user can revoke one of their own tokens.
+ # This is DELETE /v3/auth/tokens
+ # FIXME(blk-u): This test fails, a user can't revoke the same token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token1 = self.get_requested_token(auth)
+ token2 = self.get_requested_token(auth)
+
+ # FIXME(blk-u): remove expected_status=403
+ self.delete('/auth/tokens', token=token1,
+ headers={'X-Subject-Token': token2}, expected_status=403)
+
+ def test_user_revoke_other_user_token_rejected(self):
+ # A user cannot revoke another user's token.
+ # This is DELETE /v3/auth/tokens
+
+ user1_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user1_token = self.get_requested_token(user1_auth)
+
+ user2_auth = self.build_authentication_request(
+ user_id=self.another_user['id'],
+ password=self.another_user['password'])
+ user2_token = self.get_requested_token(user2_auth)
+
+ self.delete('/auth/tokens', token=user1_token,
+ headers={'X-Subject-Token': user2_token},
+ expected_status=403)
+
+ def test_admin_revoke_user_token(self):
+ # An admin can revoke a user's token.
+ # This is DELETE /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.admin_user['id'],
+ password=self.admin_user['password'],
+ project_id=self.project['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.delete('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token})
+
+
+class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
+ """Test policy enforcement of the sample v3 cloud policy file."""
+
+ def setUp(self):
+ """Setup for v3 Cloud Policy Sample Test Cases.
+
+ The following data is created:
+
+ - Three domains: domainA, domainB and admin_domain
+ - One project, referred to below as 'project'
+ - domainA has three users: domain_admin_user, project_admin_user and
+ just_a_user:
+
+ - domain_admin_user has role 'admin' on domainA,
+ - project_admin_user has role 'admin' on the project,
+ - just_a_user has a non-admin role on both domainA and the project.
+ - admin_domain has user cloud_admin_user, with an 'admin' role
+ on admin_domain.
+
+ We test various api protection rules from the cloud sample policy
+ file to make sure the sample is valid and that we correctly enforce it.
+
+ """
+ # Ensure that test_v3.RestfulTestCase doesn't load its own
+ # sample data, which would make checking the results of our
+ # tests harder
+ super(IdentityTestv3CloudPolicySample, self).setUp()
+
+ # Finally, switch to the v3 sample policy file
+ self.addCleanup(rules.reset)
+ rules.reset()
+ self.config_fixture.config(
+ group='oslo_policy',
+ policy_file=tests.dirs.etc('policy.v3cloudsample.json'))
+
+ def load_sample_data(self):
+ # Start by creating a couple of domains
+ self._populate_default_domain()
+ self.domainA = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = self.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'], self.domainB)
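+ # The sample v3 cloud policy ties its cloud-admin rule to the
+ # literal domain id 'admin_domain_id', hence the fixed id here.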
+ self.admin_domain = {'id': 'admin_domain_id', 'name': 'Admin_domain'}
+ self.resource_api.create_domain(self.admin_domain['id'],
+ self.admin_domain)
+
+ # And our users
+ self.cloud_admin_user = self.new_user_ref(
+ domain_id=self.admin_domain['id'])
+ password = uuid.uuid4().hex
+ self.cloud_admin_user['password'] = password
+ self.cloud_admin_user = (
+ self.identity_api.create_user(self.cloud_admin_user))
+ self.cloud_admin_user['password'] = password
+ self.just_a_user = self.new_user_ref(domain_id=self.domainA['id'])
+ password = uuid.uuid4().hex
+ self.just_a_user['password'] = password
+ self.just_a_user = self.identity_api.create_user(self.just_a_user)
+ self.just_a_user['password'] = password
+ self.domain_admin_user = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ password = uuid.uuid4().hex
+ self.domain_admin_user['password'] = password
+ self.domain_admin_user = (
+ self.identity_api.create_user(self.domain_admin_user))
+ self.domain_admin_user['password'] = password
+ self.project_admin_user = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ password = uuid.uuid4().hex
+ self.project_admin_user['password'] = password
+ self.project_admin_user = (
+ self.identity_api.create_user(self.project_admin_user))
+ self.project_admin_user['password'] = password
+
+ # The admin role and another plain role
+ self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
+ self.role_api.create_role(self.admin_role['id'], self.admin_role)
+ self.role = self.new_role_ref()
+ self.role_api.create_role(self.role['id'], self.role)
+
+ # The cloud admin just gets the admin role
+ self.assignment_api.create_grant(self.admin_role['id'],
+ user_id=self.cloud_admin_user['id'],
+ domain_id=self.admin_domain['id'])
+
+ # Assign roles to the domain
+ self.assignment_api.create_grant(self.admin_role['id'],
+ user_id=self.domain_admin_user['id'],
+ domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.just_a_user['id'],
+ domain_id=self.domainA['id'])
+
+ # Create and assign roles to the project
+ self.project = self.new_project_ref(domain_id=self.domainA['id'])
+ self.resource_api.create_project(self.project['id'], self.project)
+ self.assignment_api.create_grant(self.admin_role['id'],
+ user_id=self.project_admin_user['id'],
+ project_id=self.project['id'])
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.just_a_user['id'],
+ project_id=self.project['id'])
+
+ def _stati(self, expected_status):
+ # Return the expected status codes for calls that return data,
+ # create data, and return no data; any explicitly specified
+ # status overrides the normal success values.
+ if expected_status is None:
+ return (200, 201, 204)
+ else:
+ return (expected_status, expected_status, expected_status)
+
+ def _test_user_management(self, domain_id, expected=None):
+ status_OK, status_created, status_no_data = self._stati(expected)
+ entity_url = '/users/%s' % self.just_a_user['id']
+ list_url = '/users?domain_id=%s' % domain_id
+
+ self.get(entity_url, auth=self.auth,
+ expected_status=status_OK)
+ self.get(list_url, auth=self.auth,
+ expected_status=status_OK)
+ user = {'description': 'Updated'}
+ self.patch(entity_url, auth=self.auth, body={'user': user},
+ expected_status=status_OK)
+ self.delete(entity_url, auth=self.auth,
+ expected_status=status_no_data)
+
+ user_ref = self.new_user_ref(domain_id=domain_id)
+ self.post('/users', auth=self.auth, body={'user': user_ref},
+ expected_status=status_created)
+
+ def _test_project_management(self, domain_id, expected=None):
+ status_OK, status_created, status_no_data = self._stati(expected)
+ entity_url = '/projects/%s' % self.project['id']
+ list_url = '/projects?domain_id=%s' % domain_id
+
+ self.get(entity_url, auth=self.auth,
+ expected_status=status_OK)
+ self.get(list_url, auth=self.auth,
+ expected_status=status_OK)
+ project = {'description': 'Updated'}
+ self.patch(entity_url, auth=self.auth, body={'project': project},
+ expected_status=status_OK)
+ self.delete(entity_url, auth=self.auth,
+ expected_status=status_no_data)
+
+ proj_ref = self.new_project_ref(domain_id=domain_id)
+ self.post('/projects', auth=self.auth, body={'project': proj_ref},
+ expected_status=status_created)
+
+ def _test_domain_management(self, expected=None):
+ status_OK, status_created, status_no_data = self._stati(expected)
+ entity_url = '/domains/%s' % self.domainB['id']
+ list_url = '/domains'
+
+ self.get(entity_url, auth=self.auth,
+ expected_status=status_OK)
+ self.get(list_url, auth=self.auth,
+ expected_status=status_OK)
+ domain = {'description': 'Updated', 'enabled': False}
+ self.patch(entity_url, auth=self.auth, body={'domain': domain},
+ expected_status=status_OK)
+ self.delete(entity_url, auth=self.auth,
+ expected_status=status_no_data)
+
+ domain_ref = self.new_domain_ref()
+ self.post('/domains', auth=self.auth, body={'domain': domain_ref},
+ expected_status=status_created)
+
+ def _test_grants(self, target, entity_id, expected=None):
+ status_OK, status_created, status_no_data = self._stati(expected)
+ a_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.role_api.create_role(a_role['id'], a_role)
+
+ collection_url = (
+ '/%(target)s/%(target_id)s/users/%(user_id)s/roles' % {
+ 'target': target,
+ 'target_id': entity_id,
+ 'user_id': self.just_a_user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': a_role['id']}
+
+ self.put(member_url, auth=self.auth,
+ expected_status=status_no_data)
+ self.head(member_url, auth=self.auth,
+ expected_status=status_no_data)
+ self.get(collection_url, auth=self.auth,
+ expected_status=status_OK)
+ self.delete(member_url, auth=self.auth,
+ expected_status=status_no_data)
+
+ def test_user_management(self):
+ # First, authenticate with a user that does not have the domain
+ # admin role - shouldn't be able to do much.
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_user_management(
+ self.domainA['id'], expected=exception.ForbiddenAction.code)
+
+ # Now, authenticate with a user that does have the domain admin role
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_user_management(self.domainA['id'])
+
+ def test_user_management_by_cloud_admin(self):
+ # Test user management with a cloud admin. This user should
+ # be able to manage users in any domain.
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+
+ self._test_user_management(self.domainA['id'])
+
+ def test_project_management(self):
+ # First, authenticate with a user that does not have the project
+ # admin role - shouldn't be able to do much.
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_project_management(
+ self.domainA['id'], expected=exception.ForbiddenAction.code)
+
+ # ...but should still be able to list projects of which they are
+ # a member
+ url = '/users/%s/projects' % self.just_a_user['id']
+ self.get(url, auth=self.auth)
+
+ # Now, authenticate with a user that does have the domain admin role
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_project_management(self.domainA['id'])
+
+ def test_project_management_by_cloud_admin(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+
+ # Check that the cloud admin can manage projects in a
+ # domain other than their own
+ self._test_project_management(self.domainA['id'])
+
+ def test_domain_grants(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_grants('domains', self.domainA['id'],
+ expected=exception.ForbiddenAction.code)
+
+ # Now, authenticate with a user that does have the domain admin role
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_grants('domains', self.domainA['id'])
+
+ # Check that with such a token we cannot modify grants on a
+ # different domain
+ self._test_grants('domains', self.domainB['id'],
+ expected=exception.ForbiddenAction.code)
+
+ def test_domain_grants_by_cloud_admin(self):
+ # Test domain grants with a cloud admin. This user should be
+ # able to manage roles on any domain.
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+
+ self._test_grants('domains', self.domainA['id'])
+
+ def test_project_grants(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ project_id=self.project['id'])
+
+ self._test_grants('projects', self.project['id'],
+ expected=exception.ForbiddenAction.code)
+
+ # Now, authenticate with a user that does have the project
+ # admin role
+ self.auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ self._test_grants('projects', self.project['id'])
+
+ def test_project_grants_by_domain_admin(self):
+ # Test project grants with a domain admin. This user should be
+ # able to manage roles on any project in its own domain.
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_grants('projects', self.project['id'])
+
+ def test_cloud_admin(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_domain_management(
+ expected=exception.ForbiddenAction.code)
+
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+
+ self._test_domain_management()
+
+ def test_list_user_credentials(self):
+ self.credential_user = self.new_credential_ref(self.just_a_user['id'])
+ self.credential_api.create_credential(self.credential_user['id'],
+ self.credential_user)
+ self.credential_admin = self.new_credential_ref(
+ self.cloud_admin_user['id'])
+ self.credential_api.create_credential(self.credential_admin['id'],
+ self.credential_admin)
+
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ url = '/credentials?user_id=%s' % self.just_a_user['id']
+ self.get(url, auth=self.auth)
+ url = '/credentials?user_id=%s' % self.cloud_admin_user['id']
+ self.get(url, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+ url = '/credentials'
+ self.get(url, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_get_and_delete_ec2_credentials(self):
+ """Tests getting and deleting ec2 credentials through the ec2 API."""
+ another_user = self.new_user_ref(domain_id=self.domainA['id'])
+ password = another_user['password']
+ another_user = self.identity_api.create_user(another_user)
+
+ # create a credential for just_a_user
+ just_user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ project_id=self.project['id'])
+ url = '/users/%s/credentials/OS-EC2' % self.just_a_user['id']
+ r = self.post(url, body={'tenant_id': self.project['id']},
+ auth=just_user_auth)
+
+ # another normal user can't get the credential
+ another_user_auth = self.build_authentication_request(
+ user_id=another_user['id'],
+ password=password)
+ another_user_url = '/users/%s/credentials/OS-EC2/%s' % (
+ another_user['id'], r.result['credential']['access'])
+ self.get(another_user_url, auth=another_user_auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ # the owner can get the credential
+ just_user_url = '/users/%s/credentials/OS-EC2/%s' % (
+ self.just_a_user['id'], r.result['credential']['access'])
+ self.get(just_user_url, auth=just_user_auth)
+
+ # another normal user can't delete the credential
+ self.delete(another_user_url, auth=another_user_auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ # the owner can delete the credential
+ self.delete(just_user_url, auth=just_user_auth)
+
+ def test_user_validate_same_token(self):
+ # Given a non-admin user token, the token can be used to validate
+ # itself.
+ # This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
+ # FIXME(blk-u): This test fails, a user can't validate their own token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token = self.get_requested_token(auth)
+
+ # FIXME(blk-u): remove expected_status=403.
+ self.get('/auth/tokens', token=token,
+ headers={'X-Subject-Token': token}, expected_status=403)
+
+ def test_user_validate_user_token(self):
+ # A user can validate one of their own tokens.
+ # This is GET /v3/auth/tokens
+ # FIXME(blk-u): This test fails, a user can't validate their own token,
+ # see bug 1421825.
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token1 = self.get_requested_token(auth)
+ token2 = self.get_requested_token(auth)
+
+ # FIXME(blk-u): remove expected_status=403.
+ self.get('/auth/tokens', token=token1,
+ headers={'X-Subject-Token': token2}, expected_status=403)
+
+ def test_user_validate_other_user_token_rejected(self):
+ # A user cannot validate another user's token.
+ # This is GET /v3/auth/tokens
+
+ user1_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user1_token = self.get_requested_token(user1_auth)
+
+ user2_auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'])
+ user2_token = self.get_requested_token(user2_auth)
+
+ self.get('/auth/tokens', token=user1_token,
+ headers={'X-Subject-Token': user2_token}, expected_status=403)
+
+ def test_admin_validate_user_token(self):
+ # An admin can validate a user's token.
+ # This is GET /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.get('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token})
+
+ def test_user_check_same_token(self):
+ # Given a non-admin user token, the token can be used to check
+ # itself.
+ # This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token = self.get_requested_token(auth)
+
+ self.head('/auth/tokens', token=token,
+ headers={'X-Subject-Token': token}, expected_status=200)
+
+ def test_user_check_user_token(self):
+ # A user can check one of their own tokens.
+ # This is HEAD /v3/auth/tokens
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token1 = self.get_requested_token(auth)
+ token2 = self.get_requested_token(auth)
+
+ self.head('/auth/tokens', token=token1,
+ headers={'X-Subject-Token': token2}, expected_status=200)
+
+ def test_user_check_other_user_token_rejected(self):
+ # A user cannot check another user's token.
+ # This is HEAD /v3/auth/tokens
+
+ user1_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user1_token = self.get_requested_token(user1_auth)
+
+ user2_auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'])
+ user2_token = self.get_requested_token(user2_auth)
+
+ self.head('/auth/tokens', token=user1_token,
+ headers={'X-Subject-Token': user2_token},
+ expected_status=403)
+
+ def test_admin_check_user_token(self):
+ # An admin can check a user's token.
+ # This is HEAD /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.head('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token}, expected_status=200)
+
+ def test_user_revoke_same_token(self):
+ # Given a non-admin user token, the token can be used to revoke
+ # itself.
+ # This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token = self.get_requested_token(auth)
+
+ self.delete('/auth/tokens', token=token,
+ headers={'X-Subject-Token': token})
+
+ def test_user_revoke_user_token(self):
+ # A user can revoke one of their own tokens.
+ # This is DELETE /v3/auth/tokens
+
+ auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ token1 = self.get_requested_token(auth)
+ token2 = self.get_requested_token(auth)
+
+ self.delete('/auth/tokens', token=token1,
+ headers={'X-Subject-Token': token2})
+
+ def test_user_revoke_other_user_token_rejected(self):
+ # A user cannot revoke another user's token.
+ # This is DELETE /v3/auth/tokens
+
+ user1_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user1_token = self.get_requested_token(user1_auth)
+
+ user2_auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'])
+ user2_token = self.get_requested_token(user2_auth)
+
+ self.delete('/auth/tokens', token=user1_token,
+ headers={'X-Subject-Token': user2_token},
+ expected_status=403)
+
+ def test_admin_revoke_user_token(self):
+ # An admin can revoke a user's token.
+ # This is DELETE /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.delete('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token})
diff --git a/keystone-moon/keystone/tests/unit/test_validation.py b/keystone-moon/keystone/tests/unit/test_validation.py
new file mode 100644
index 00000000..f83cabcb
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_validation.py
@@ -0,0 +1,1563 @@
+# -*- coding: utf-8 -*-
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import testtools
+
+from keystone.assignment import schema as assignment_schema
+from keystone.catalog import schema as catalog_schema
+from keystone.common import validation
+from keystone.common.validation import parameter_types
+from keystone.common.validation import validators
+from keystone.contrib.endpoint_filter import schema as endpoint_filter_schema
+from keystone.contrib.federation import schema as federation_schema
+from keystone.credential import schema as credential_schema
+from keystone import exception
+from keystone.policy import schema as policy_schema
+from keystone.resource import schema as resource_schema
+from keystone.trust import schema as trust_schema
+
+"""Example model to validate create requests against. Assume that this is
+the only backend for the create and validate schemas. This is just an
+example to show how a backend can be used to construct a schema. In
+Keystone, schemas are built according to the Identity API and the backends
+available in Keystone. This example does not mean that all schema in
+Keystone were strictly based on the SQL backends.
+
+class Entity(sql.ModelBase):
+ __tablename__ = 'entity'
+ attributes = ['id', 'name', 'domain_id', 'description']
+ id = sql.Column(sql.String(64), primary_key=True)
+ name = sql.Column(sql.String(255), nullable=False)
+ description = sql.Column(sql.Text(), nullable=True)
+ enabled = sql.Column(sql.Boolean, default=True, nullable=False)
+ url = sql.Column(sql.String(225), nullable=True)
+ email = sql.Column(sql.String(64), nullable=True)
+"""
+
+# Test schema to validate create requests against
+
+_entity_properties = {
+ 'name': parameter_types.name,
+ 'description': validation.nullable(parameter_types.description),
+ 'enabled': parameter_types.boolean,
+ 'url': validation.nullable(parameter_types.url),
+ 'email': validation.nullable(parameter_types.email),
+ 'id_string': validation.nullable(parameter_types.id_string)
+}
+
+entity_create = {
+ 'type': 'object',
+ 'properties': _entity_properties,
+ 'required': ['name'],
+ 'additionalProperties': True,
+}
+
+entity_update = {
+ 'type': 'object',
+ 'properties': _entity_properties,
+ 'minProperties': 1,
+ 'additionalProperties': True,
+}
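+
+# Illustrative usage sketch (an editorial addition, not part of the original
+# test module): the create schema requires `name`, while the update schema
+# only requires that at least one property be present (minProperties). Any
+# violation raises exception.SchemaValidationError. Never invoked at import.
+def _entity_schema_usage_sketch():
+    create_validator = validators.SchemaValidator(entity_create)
+    # A body carrying the single required property validates cleanly.
+    create_validator.validate({'name': 'some entity'})
+    update_validator = validators.SchemaValidator(entity_update)
+    # Any one property satisfies the update schema...
+    update_validator.validate({'enabled': True})
+    # ...but an empty body violates minProperties and raises.
+    try:
+        update_validator.validate({})
+    except exception.SchemaValidationError:
+        pass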
+
+_VALID_ENABLED_FORMATS = [True, False]
+
+_INVALID_ENABLED_FORMATS = ['some string', 1, 0, 'True', 'False']
+
+_VALID_URLS = ['https://example.com', 'http://EXAMPLE.com/v3',
+ 'http://localhost', 'http://127.0.0.1:5000',
+ 'http://1.1.1.1', 'http://255.255.255.255',
+ 'http://[::1]', 'http://[::1]:35357',
+ 'http://[1::8]', 'http://[fe80::8%25eth0]',
+ 'http://[::1.2.3.4]', 'http://[2001:DB8::1.2.3.4]',
+ 'http://[::a:1.2.3.4]', 'http://[a::b:1.2.3.4]',
+ 'http://[1:2:3:4:5:6:7:8]', 'http://[1:2:3:4:5:6:1.2.3.4]',
+ 'http://[abcd:efAB:CDEF:1111:9999::]']
+
+_INVALID_URLS = [False, 'this is not a URL', 1234, 'www.example.com',
+ 'localhost', 'http//something.com',
+ 'https//something.com']
+
+_VALID_FILTERS = [{'interface': 'admin'},
+ {'region': 'US-WEST',
+ 'interface': 'internal'}]
+
+_INVALID_FILTERS = ['some string', 1, 0, True, False]
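+
+# Illustrative sketch (an editorial addition): the corpora above drive the
+# table-driven tests below -- every valid value must validate and every
+# invalid value must raise exception.SchemaValidationError, which the test
+# classes assert via assertRaises. Never invoked at import time.
+def _corpus_driven_sketch():
+    validator = validators.SchemaValidator(entity_create)
+    for url in _VALID_URLS:
+        validator.validate({'name': 'some entity', 'url': url})
+    for url in _INVALID_URLS:
+        try:
+            validator.validate({'name': 'some entity', 'url': url})
+        except exception.SchemaValidationError:
+            pass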
+
+
+class EntityValidationTestCase(testtools.TestCase):
+
+ def setUp(self):
+ super(EntityValidationTestCase, self).setUp()
+ self.resource_name = 'some resource name'
+ self.description = 'Some valid description'
+ self.valid_enabled = True
+ self.valid_url = 'http://example.com'
+ self.valid_email = 'joe@example.com'
+ self.create_schema_validator = validators.SchemaValidator(
+ entity_create)
+ self.update_schema_validator = validators.SchemaValidator(
+ entity_update)
+
+ def test_create_entity_with_all_valid_parameters_validates(self):
+ """Validate all parameter values against test schema."""
+ request_to_validate = {'name': self.resource_name,
+ 'description': self.description,
+ 'enabled': self.valid_enabled,
+ 'url': self.valid_url,
+ 'email': self.valid_email}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_only_required_valid_parameters_validates(self):
+ """Validate correct for only parameters values against test schema."""
+ request_to_validate = {'name': self.resource_name}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_name_too_long_raises_exception(self):
+ """Validate long names.
+
+ Validate that an exception is raised when validating a string of 255+
+ characters passed in as a name.
+ """
+ invalid_name = 'a' * 256
+ request_to_validate = {'name': invalid_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_schema_validator.validate,
+ request_to_validate)
+
+ def test_create_entity_with_name_too_short_raises_exception(self):
+ """Validate short names.
+
+ Test that an exception is raised when passing a string of length
+ zero as a name parameter.
+ """
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_schema_validator.validate,
+ request_to_validate)
+
+ def test_create_entity_with_unicode_name_validates(self):
+ """Test that we successfully validate a unicode string."""
+ request_to_validate = {'name': u'αβγδ'}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_invalid_enabled_format_raises_exception(self):
+ """Validate invalid enabled formats.
+
+ Test that an exception is raised when passing invalid boolean-like
+ values as `enabled`.
+ """
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.resource_name,
+ 'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_schema_validator.validate,
+ request_to_validate)
+
+ def test_create_entity_with_valid_enabled_formats_validates(self):
+ """Validate valid enabled formats.
+
+ Test that we have successful validation on boolean values for
+ `enabled`.
+ """
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.resource_name,
+ 'enabled': valid_enabled}
+ # Make sure validation doesn't raise a validation exception
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_valid_urls_validates(self):
+ """Test that proper urls are successfully validated."""
+ for valid_url in _VALID_URLS:
+ request_to_validate = {'name': self.resource_name,
+ 'url': valid_url}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_invalid_urls_fails(self):
+ """Test that an exception is raised when validating improper urls."""
+ for invalid_url in _INVALID_URLS:
+ request_to_validate = {'name': self.resource_name,
+ 'url': invalid_url}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_schema_validator.validate,
+ request_to_validate)
+
+ def test_create_entity_with_valid_email_validates(self):
+ """Validate email address
+
+ Test that we successfully validate properly formatted email
+ addresses.
+ """
+ request_to_validate = {'name': self.resource_name,
+ 'email': self.valid_email}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_invalid_email_fails(self):
+ """Validate invalid email address.
+
+ Test that an exception is raised when validating improperly
+ formatted email addresses.
+ """
+ request_to_validate = {'name': self.resource_name,
+ 'email': 'some invalid email value'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_schema_validator.validate,
+ request_to_validate)
+
+ def test_create_entity_with_valid_id_strings(self):
+ """Validate acceptable id strings."""
+ valid_id_strings = [str(uuid.uuid4()), uuid.uuid4().hex, 'default']
+ for valid_id in valid_id_strings:
+ request_to_validate = {'name': self.resource_name,
+ 'id_string': valid_id}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_invalid_id_strings(self):
+ """Exception raised when using invalid id strings."""
+ long_string = 'A' * 65
+ invalid_id_strings = ['', long_string, 'this,should,fail']
+ for invalid_id in invalid_id_strings:
+ request_to_validate = {'name': self.resource_name,
+ 'id_string': invalid_id}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_schema_validator.validate,
+ request_to_validate)
+
+ def test_create_entity_with_null_id_string(self):
+ """Validate that None is an acceptable optional string type."""
+ request_to_validate = {'name': self.resource_name,
+ 'id_string': None}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_create_entity_with_null_string_succeeds(self):
+ """Exception raised when passing None on required id strings."""
+ request_to_validate = {'name': self.resource_name,
+ 'id_string': None}
+ self.create_schema_validator.validate(request_to_validate)
+
+ def test_update_entity_with_no_parameters_fails(self):
+ """At least one parameter needs to be present for an update."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_schema_validator.validate,
+ request_to_validate)
+
+ def test_update_entity_with_all_parameters_valid_validates(self):
+ """Simulate updating an entity by ID."""
+ request_to_validate = {'name': self.resource_name,
+ 'description': self.description,
+ 'enabled': self.valid_enabled,
+ 'url': self.valid_url,
+ 'email': self.valid_email}
+ self.update_schema_validator.validate(request_to_validate)
+
+ def test_update_entity_with_a_valid_required_parameter_validates(self):
+ """Succeed if a valid required parameter is provided."""
+ request_to_validate = {'name': self.resource_name}
+ self.update_schema_validator.validate(request_to_validate)
+
+ def test_update_entity_with_invalid_required_parameter_fails(self):
+ """Fail if a provided required parameter is invalid."""
+ request_to_validate = {'name': 'a' * 256}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_schema_validator.validate,
+ request_to_validate)
+
+ def test_update_entity_with_a_null_optional_parameter_validates(self):
+ """Optional parameters can be null to removed the value."""
+ request_to_validate = {'email': None}
+ self.update_schema_validator.validate(request_to_validate)
+
+ def test_update_entity_with_a_required_null_parameter_fails(self):
+ """The `name` parameter can't be null."""
+ request_to_validate = {'name': None}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_schema_validator.validate,
+ request_to_validate)
+
+ def test_update_entity_with_a_valid_optional_parameter_validates(self):
+ """Succeeds with only a single valid optional parameter."""
+ request_to_validate = {'email': self.valid_email}
+ self.update_schema_validator.validate(request_to_validate)
+
+ def test_update_entity_with_invalid_optional_parameter_fails(self):
+ """Fails when an optional parameter is invalid."""
+ request_to_validate = {'email': 0}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_schema_validator.validate,
+ request_to_validate)
+
+
+class ProjectValidationTestCase(testtools.TestCase):
+ """Test for V3 Project API validation."""
+
+ def setUp(self):
+ super(ProjectValidationTestCase, self).setUp()
+
+ self.project_name = 'My Project'
+
+ create = resource_schema.project_create
+ update = resource_schema.project_update
+ self.create_project_validator = validators.SchemaValidator(create)
+ self.update_project_validator = validators.SchemaValidator(update)
+
+ def test_validate_project_request(self):
+ """Test that we validate a project with `name` in request."""
+ request_to_validate = {'name': self.project_name}
+ self.create_project_validator.validate(request_to_validate)
+
+ def test_validate_project_request_without_name_fails(self):
+ """Validate project request fails without name."""
+ request_to_validate = {'enabled': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_request_with_enabled(self):
+ """Validate `enabled` as boolean-like values for projects."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.project_name,
+ 'enabled': valid_enabled}
+ self.create_project_validator.validate(request_to_validate)
+
+ def test_validate_project_request_with_invalid_enabled_fails(self):
+ """Exception is raised when `enabled` isn't a boolean-like value."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.project_name,
+ 'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_request_with_valid_description(self):
+ """Test that we validate `description` in create project requests."""
+ request_to_validate = {'name': self.project_name,
+ 'description': 'My Project'}
+ self.create_project_validator.validate(request_to_validate)
+
+ def test_validate_project_request_with_invalid_description_fails(self):
+ """Exception is raised when `description` as a non-string value."""
+ request_to_validate = {'name': self.project_name,
+ 'description': False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_request_with_name_too_long(self):
+ """Exception is raised when `name` is too long."""
+ long_project_name = 'a' * 65
+ request_to_validate = {'name': long_project_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_request_with_name_too_short(self):
+ """Exception raised when `name` is too short."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_request_with_valid_parent_id(self):
+ """Test that we validate `parent_id` in create project requests."""
+ # parent_id is nullable
+ request_to_validate = {'name': self.project_name,
+ 'parent_id': None}
+ self.create_project_validator.validate(request_to_validate)
+ request_to_validate = {'name': self.project_name,
+ 'parent_id': uuid.uuid4().hex}
+ self.create_project_validator.validate(request_to_validate)
+
+ def test_validate_project_request_with_invalid_parent_id_fails(self):
+ """Exception is raised when `parent_id` as a non-id value."""
+ request_to_validate = {'name': self.project_name,
+ 'parent_id': False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+ request_to_validate = {'name': self.project_name,
+ 'parent_id': 'fake project'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_update_request(self):
+ """Test that we validate a project update request."""
+ request_to_validate = {'domain_id': uuid.uuid4().hex}
+ self.update_project_validator.validate(request_to_validate)
+
+ def test_validate_project_update_request_with_no_parameters_fails(self):
+ """Exception is raised when updating project without parameters."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_update_request_with_name_too_long_fails(self):
+ """Exception raised when updating a project with `name` too long."""
+ long_project_name = 'a' * 65
+ request_to_validate = {'name': long_project_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_update_request_with_name_too_short_fails(self):
+ """Exception raised when updating a project with `name` too short."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_project_validator.validate,
+ request_to_validate)
+
+ def test_validate_project_update_request_with_null_domain_id_fails(self):
+ """Exception is raised when `domain_id` is null on project update."""
+ request_to_validate = {'domain_id': None}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_project_validator.validate,
+ request_to_validate)
+
+
+class DomainValidationTestCase(testtools.TestCase):
+ """Test for V3 Domain API validation."""
+
+ def setUp(self):
+ super(DomainValidationTestCase, self).setUp()
+
+ self.domain_name = 'My Domain'
+
+ create = resource_schema.domain_create
+ update = resource_schema.domain_update
+ self.create_domain_validator = validators.SchemaValidator(create)
+ self.update_domain_validator = validators.SchemaValidator(update)
+
+ def test_validate_domain_request(self):
+ """Make sure we successfully validate a create domain request."""
+ request_to_validate = {'name': self.domain_name}
+ self.create_domain_validator.validate(request_to_validate)
+
+ def test_validate_domain_request_without_name_fails(self):
+ """Make sure we raise an exception when `name` isn't included."""
+ request_to_validate = {'enabled': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_request_with_enabled(self):
+ """Validate `enabled` as boolean-like values for domains."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.domain_name,
+ 'enabled': valid_enabled}
+ self.create_domain_validator.validate(request_to_validate)
+
+ def test_validate_domain_request_with_invalid_enabled_fails(self):
+ """Exception is raised when `enabled` isn't a boolean-like value."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.domain_name,
+ 'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_request_with_valid_description(self):
+ """Test that we validate `description` in create domain requests."""
+ request_to_validate = {'name': self.domain_name,
+ 'description': 'My Domain'}
+ self.create_domain_validator.validate(request_to_validate)
+
+ def test_validate_domain_request_with_invalid_description_fails(self):
+ """Exception is raised when `description` is a non-string value."""
+ request_to_validate = {'name': self.domain_name,
+ 'description': False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_request_with_name_too_long(self):
+ """Exception is raised when `name` is too long."""
+ long_domain_name = 'a' * 65
+ request_to_validate = {'name': long_domain_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_request_with_name_too_short(self):
+ """Exception raised when `name` is too short."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_update_request(self):
+ """Test that we validate a domain update request."""
+ request_to_validate = {'domain_id': uuid.uuid4().hex}
+ self.update_domain_validator.validate(request_to_validate)
+
+ def test_validate_domain_update_request_with_no_parameters_fails(self):
+ """Exception is raised when updating a domain without parameters."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_update_request_with_name_too_long_fails(self):
+ """Exception raised when updating a domain with `name` too long."""
+ long_domain_name = 'a' * 65
+ request_to_validate = {'name': long_domain_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_domain_validator.validate,
+ request_to_validate)
+
+ def test_validate_domain_update_request_with_name_too_short_fails(self):
+ """Exception raised when updating a domain with `name` too short."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_domain_validator.validate,
+ request_to_validate)
+
+
+class RoleValidationTestCase(testtools.TestCase):
+ """Test for V3 Role API validation."""
+
+ def setUp(self):
+ super(RoleValidationTestCase, self).setUp()
+
+ self.role_name = 'My Role'
+
+ create = assignment_schema.role_create
+ update = assignment_schema.role_update
+ self.create_role_validator = validators.SchemaValidator(create)
+ self.update_role_validator = validators.SchemaValidator(update)
+
+ def test_validate_role_request(self):
+ """Test we can successfully validate a create role request."""
+ request_to_validate = {'name': self.role_name}
+ self.create_role_validator.validate(request_to_validate)
+
+ def test_validate_role_create_without_name_raises_exception(self):
+ """Test that we raise an exception when `name` isn't included."""
+ request_to_validate = {'enabled': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_role_validator.validate,
+ request_to_validate)
+
+ def test_validate_role_create_when_name_is_not_string_fails(self):
+ """Exception is raised on role create with a non-string `name`."""
+ request_to_validate = {'name': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_role_validator.validate,
+ request_to_validate)
+ request_to_validate = {'name': 24}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_role_validator.validate,
+ request_to_validate)
+
+ def test_validate_role_update_request(self):
+ """Test that we validate a role update request."""
+ request_to_validate = {'name': 'My New Role'}
+ self.update_role_validator.validate(request_to_validate)
+
+ def test_validate_role_update_fails_with_invalid_name(self):
+ """Exception raised when updating a role with an invalid `name`."""
+ request_to_validate = {'name': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_role_validator.validate,
+ request_to_validate)
+
+ request_to_validate = {'name': 24}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_role_validator.validate,
+ request_to_validate)
+
+
+class PolicyValidationTestCase(testtools.TestCase):
+ """Test for V3 Policy API validation."""
+
+ def setUp(self):
+ super(PolicyValidationTestCase, self).setUp()
+
+ create = policy_schema.policy_create
+ update = policy_schema.policy_update
+ self.create_policy_validator = validators.SchemaValidator(create)
+ self.update_policy_validator = validators.SchemaValidator(update)
+
+ def test_validate_policy_succeeds(self):
+ """Test that we validate a create policy request."""
+ request_to_validate = {'blob': 'some blob information',
+ 'type': 'application/json'}
+ self.create_policy_validator.validate(request_to_validate)
+
+ def test_validate_policy_without_blob_fails(self):
+ """Exception raised without `blob` in request."""
+ request_to_validate = {'type': 'application/json'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_policy_validator.validate,
+ request_to_validate)
+
+ def test_validate_policy_without_type_fails(self):
+ """Exception raised without `type` in request."""
+ request_to_validate = {'blob': 'some blob information'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_policy_validator.validate,
+ request_to_validate)
+
+ def test_validate_policy_create_with_extra_parameters_succeeds(self):
+ """Validate policy create with extra parameters."""
+ request_to_validate = {'blob': 'some blob information',
+ 'type': 'application/json',
+ 'extra': 'some extra stuff'}
+ self.create_policy_validator.validate(request_to_validate)
+
+ def test_validate_policy_create_with_invalid_type_fails(self):
+ """Exception raised when `blob` and `type` are boolean."""
+ for prop in ['blob', 'type']:
+ request_to_validate = {prop: False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_policy_validator.validate,
+ request_to_validate)
+
+ def test_validate_policy_update_without_parameters_fails(self):
+ """Exception raised when updating policy without parameters."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_policy_validator.validate,
+ request_to_validate)
+
+ def test_validate_policy_update_with_extra_parameters_succeeds(self):
+ """Validate policy update request with extra parameters."""
+ request_to_validate = {'blob': 'some blob information',
+ 'type': 'application/json',
+ 'extra': 'some extra stuff'}
+ self.update_policy_validator.validate(request_to_validate)
+
+ def test_validate_policy_update_succeeds(self):
+ """Test that we validate a policy update request."""
+ request_to_validate = {'blob': 'some blob information',
+ 'type': 'application/json'}
+ self.update_policy_validator.validate(request_to_validate)
+
+ def test_validate_policy_update_with_invalid_type_fails(self):
+ """Exception raised when invalid `type` on policy update."""
+ for prop in ['blob', 'type']:
+ request_to_validate = {prop: False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_policy_validator.validate,
+ request_to_validate)
+
+
+class CredentialValidationTestCase(testtools.TestCase):
+ """Test for V3 Credential API validation."""
+
+ def setUp(self):
+ super(CredentialValidationTestCase, self).setUp()
+
+ create = credential_schema.credential_create
+ update = credential_schema.credential_update
+ self.create_credential_validator = validators.SchemaValidator(create)
+ self.update_credential_validator = validators.SchemaValidator(update)
+
+ def test_validate_credential_succeeds(self):
+ """Test that we validate a credential request."""
+ request_to_validate = {'blob': 'some string',
+ 'project_id': uuid.uuid4().hex,
+ 'type': 'ec2',
+ 'user_id': uuid.uuid4().hex}
+ self.create_credential_validator.validate(request_to_validate)
+
+ def test_validate_credential_without_blob_fails(self):
+ """Exception raised without `blob` in create request."""
+ request_to_validate = {'type': 'ec2',
+ 'user_id': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_credential_validator.validate,
+ request_to_validate)
+
+ def test_validate_credential_without_user_id_fails(self):
+ """Exception raised without `user_id` in create request."""
+ request_to_validate = {'blob': 'some credential blob',
+ 'type': 'ec2'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_credential_validator.validate,
+ request_to_validate)
+
+ def test_validate_credential_without_type_fails(self):
+ """Exception raised without `type` in create request."""
+ request_to_validate = {'blob': 'some credential blob',
+ 'user_id': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_credential_validator.validate,
+ request_to_validate)
+
+ def test_validate_credential_ec2_without_project_id_fails(self):
+ """Validate `project_id` is required for ec2.
+
+ Test that a SchemaValidationError is raised when type is ec2
+ and no `project_id` is provided in create request.
+ """
+ request_to_validate = {'blob': 'some credential blob',
+ 'type': 'ec2',
+ 'user_id': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_credential_validator.validate,
+ request_to_validate)
+
+ def test_validate_credential_with_project_id_succeeds(self):
+ """Test that credential request works for all types."""
+ cred_types = ['ec2', 'cert', uuid.uuid4().hex]
+
+ for c_type in cred_types:
+ request_to_validate = {'blob': 'some blob',
+ 'project_id': uuid.uuid4().hex,
+ 'type': c_type,
+ 'user_id': uuid.uuid4().hex}
+ # Make sure an exception isn't raised
+ self.create_credential_validator.validate(request_to_validate)
+
+ def test_validate_credential_non_ec2_without_project_id_succeeds(self):
+ """Validate `project_id` is not required for non-ec2.
+
+ Test that create request without `project_id` succeeds for any
+ non-ec2 credential.
+ """
+ cred_types = ['cert', uuid.uuid4().hex]
+
+ for c_type in cred_types:
+ request_to_validate = {'blob': 'some blob',
+ 'type': c_type,
+ 'user_id': uuid.uuid4().hex}
+ # Make sure an exception isn't raised
+ self.create_credential_validator.validate(request_to_validate)
+
+ def test_validate_credential_with_extra_parameters_succeeds(self):
+ """Validate create request with extra parameters."""
+ request_to_validate = {'blob': 'some string',
+ 'extra': False,
+ 'project_id': uuid.uuid4().hex,
+ 'type': 'ec2',
+ 'user_id': uuid.uuid4().hex}
+ self.create_credential_validator.validate(request_to_validate)
+
+ def test_validate_credential_update_succeeds(self):
+ """Test that a credential request is properly validated."""
+ request_to_validate = {'blob': 'some string',
+ 'project_id': uuid.uuid4().hex,
+ 'type': 'ec2',
+ 'user_id': uuid.uuid4().hex}
+ self.update_credential_validator.validate(request_to_validate)
+
+ def test_validate_credential_update_without_parameters_fails(self):
+ """Exception is raised on update without parameters."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_credential_validator.validate,
+ request_to_validate)
+
+ def test_validate_credential_update_with_extra_parameters_succeeds(self):
+ """Validate credential update with extra parameters."""
+ request_to_validate = {'blob': 'some string',
+ 'extra': False,
+ 'project_id': uuid.uuid4().hex,
+ 'type': 'ec2',
+ 'user_id': uuid.uuid4().hex}
+ self.update_credential_validator.validate(request_to_validate)
+
+
+class RegionValidationTestCase(testtools.TestCase):
+ """Test for V3 Region API validation."""
+
+ def setUp(self):
+ super(RegionValidationTestCase, self).setUp()
+
+ self.region_name = 'My Region'
+
+ create = catalog_schema.region_create
+ update = catalog_schema.region_update
+ self.create_region_validator = validators.SchemaValidator(create)
+ self.update_region_validator = validators.SchemaValidator(update)
+
+ def test_validate_region_request(self):
+ """Test that we validate a basic region request."""
+ # Create_region doesn't take any parameters in the request so let's
+ # make sure we cover that case.
+ request_to_validate = {}
+ self.create_region_validator.validate(request_to_validate)
+
+ def test_validate_region_create_request_with_parameters(self):
+ """Test that we validate a region request with parameters."""
+ request_to_validate = {'id': 'us-east',
+ 'description': 'US East Region',
+ 'parent_region_id': 'US Region'}
+ self.create_region_validator.validate(request_to_validate)
+
+ def test_validate_region_create_with_uuid(self):
+ """Test that we validate a region request with a UUID as the id."""
+ request_to_validate = {'id': uuid.uuid4().hex,
+ 'description': 'US East Region',
+ 'parent_region_id': uuid.uuid4().hex}
+ self.create_region_validator.validate(request_to_validate)
+
+ def test_validate_region_create_succeeds_with_extra_parameters(self):
+ """Validate create region request with extra values."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex}
+ self.create_region_validator.validate(request_to_validate)
+
+ def test_validate_region_update_succeeds(self):
+ """Test that we validate a region update request."""
+ request_to_validate = {'id': 'us-west',
+ 'description': 'US West Region',
+ 'parent_region_id': 'us-region'}
+ self.update_region_validator.validate(request_to_validate)
+
+ def test_validate_region_update_succeeds_with_extra_parameters(self):
+ """Validate extra attributes in the region update request."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex}
+ self.update_region_validator.validate(request_to_validate)
+
+ def test_validate_region_update_fails_with_no_parameters(self):
+ """Exception raised when passing no parameters in a region update."""
+ # An update request should consist of at least one value to update
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_region_validator.validate,
+ request_to_validate)
+
+
+class ServiceValidationTestCase(testtools.TestCase):
+ """Test for V3 Service API validation."""
+
+ def setUp(self):
+ super(ServiceValidationTestCase, self).setUp()
+
+ create = catalog_schema.service_create
+ update = catalog_schema.service_update
+ self.create_service_validator = validators.SchemaValidator(create)
+ self.update_service_validator = validators.SchemaValidator(update)
+
+ def test_validate_service_create_succeeds(self):
+ """Test that we validate a service create request."""
+ request_to_validate = {'name': 'Nova',
+ 'description': 'OpenStack Compute Service',
+ 'enabled': True,
+ 'type': 'compute'}
+ self.create_service_validator.validate(request_to_validate)
+
+ def test_validate_service_create_succeeds_with_required_parameters(self):
+ """Validate a service create request with the required parameters."""
+ # The only parameter required for service creation is 'type'
+ request_to_validate = {'type': 'compute'}
+ self.create_service_validator.validate(request_to_validate)
+
+ def test_validate_service_create_fails_without_type(self):
+ """Exception raised when trying to create a service without `type`."""
+ request_to_validate = {'name': 'Nova'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_create_succeeds_with_extra_parameters(self):
+ """Test that extra parameters pass validation on create service."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex}
+ self.create_service_validator.validate(request_to_validate)
+
+ def test_validate_service_create_succeeds_with_valid_enabled(self):
+ """Validate boolean values as enabled values on service create."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': valid_enabled,
+ 'type': uuid.uuid4().hex}
+ self.create_service_validator.validate(request_to_validate)
+
+ def test_validate_service_create_fails_with_invalid_enabled(self):
+ """Exception raised when boolean-like parameters as `enabled`
+
+ On service create, make sure an exception is raised if `enabled` is
+ not a boolean value.
+ """
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': invalid_enabled,
+ 'type': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_create_fails_when_name_too_long(self):
+ """Exception raised when `name` is greater than 255 characters."""
+ long_name = 'a' * 256
+ request_to_validate = {'type': 'compute',
+ 'name': long_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_create_fails_when_name_too_short(self):
+ """Exception is raised when `name` is too short."""
+ request_to_validate = {'type': 'compute',
+ 'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_create_fails_when_type_too_long(self):
+ """Exception is raised when `type` is too long."""
+ long_type_name = 'a' * 256
+ request_to_validate = {'type': long_type_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_create_fails_when_type_too_short(self):
+ """Exception is raised when `type` is too short."""
+ request_to_validate = {'type': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_update_request_succeeds(self):
+ """Test that we validate a service update request."""
+ request_to_validate = {'name': 'Cinder',
+ 'type': 'volume',
+ 'description': 'OpenStack Block Storage',
+ 'enabled': False}
+ self.update_service_validator.validate(request_to_validate)
+
+ def test_validate_service_update_fails_with_no_parameters(self):
+ """Exception raised when updating a service without values."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_update_succeeds_with_extra_parameters(self):
+ """Validate updating a service with extra parameters."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex}
+ self.update_service_validator.validate(request_to_validate)
+
+ def test_validate_service_update_succeeds_with_valid_enabled(self):
+ """Validate boolean formats as `enabled` on service update."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': valid_enabled}
+ self.update_service_validator.validate(request_to_validate)
+
+ def test_validate_service_update_fails_with_invalid_enabled(self):
+ """Exception raised when boolean-like values as `enabled`."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_update_fails_with_name_too_long(self):
+ """Exception is raised when `name` is too long on update."""
+ long_name = 'a' * 256
+ request_to_validate = {'name': long_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_update_fails_with_name_too_short(self):
+ """Exception is raised when `name` is too short on update."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_update_fails_with_type_too_long(self):
+ """Exception is raised when `type` is too long on update."""
+ long_type_name = 'a' * 256
+ request_to_validate = {'type': long_type_name}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_service_validator.validate,
+ request_to_validate)
+
+ def test_validate_service_update_fails_with_type_too_short(self):
+ """Exception is raised when `type` is too short on update."""
+ request_to_validate = {'type': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_service_validator.validate,
+ request_to_validate)
+
+
+class EndpointValidationTestCase(testtools.TestCase):
+ """Test for V3 Endpoint API validation."""
+
+ def setUp(self):
+ super(EndpointValidationTestCase, self).setUp()
+
+ create = catalog_schema.endpoint_create
+ update = catalog_schema.endpoint_update
+ self.create_endpoint_validator = validators.SchemaValidator(create)
+ self.update_endpoint_validator = validators.SchemaValidator(update)
+
+ def test_validate_endpoint_request_succeeds(self):
+ """Test that we validate an endpoint request."""
+ request_to_validate = {'enabled': True,
+ 'interface': 'admin',
+ 'region_id': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+ self.create_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_create_succeeds_with_required_parameters(self):
+ """Validate an endpoint request with only the required parameters."""
+ # According to the Identity V3 API endpoint creation requires
+ # 'service_id', 'interface', and 'url'
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'interface': 'public',
+ 'url': 'https://service.example.com:5000/'}
+ self.create_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_create_succeeds_with_valid_enabled(self):
+ """Validate an endpoint with boolean values.
+
+ Validate boolean values as `enabled` in endpoint create requests.
+ """
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': valid_enabled,
+ 'service_id': uuid.uuid4().hex,
+ 'interface': 'public',
+ 'url': 'https://service.example.com:5000/'}
+ self.create_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_create_endpoint_fails_with_invalid_enabled(self):
+ """Exception raised when boolean-like values as `enabled`."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': invalid_enabled,
+ 'service_id': uuid.uuid4().hex,
+ 'interface': 'public',
+ 'url': 'https://service.example.com:5000/'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_create_succeeds_with_extra_parameters(self):
+ """Test that extra parameters pass validation on create endpoint."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ 'interface': 'public',
+ 'url': 'https://service.example.com:5000/'}
+ self.create_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_create_fails_without_service_id(self):
+ """Exception raised when `service_id` isn't in endpoint request."""
+ request_to_validate = {'interface': 'public',
+ 'url': 'https://service.example.com:5000/'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_create_fails_without_interface(self):
+ """Exception raised when `interface` isn't in endpoint request."""
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_create_fails_without_url(self):
+ """Exception raised when `url` isn't in endpoint request."""
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'interface': 'public'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_create_succeeds_with_url(self):
+ """Validate `url` attribute in endpoint create request."""
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'interface': 'public'}
+ for url in _VALID_URLS:
+ request_to_validate['url'] = url
+ self.create_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_create_fails_with_invalid_url(self):
+ """Exception raised when passing invalid `url` in request."""
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'interface': 'public'}
+ for url in _INVALID_URLS:
+ request_to_validate['url'] = url
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_create_fails_with_invalid_interface(self):
+ """Exception raised with invalid `interface`."""
+ request_to_validate = {'interface': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_update_fails_with_invalid_enabled(self):
+ """Exception raised when `enabled` is boolean-like value."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_update_succeeds_with_valid_enabled(self):
+ """Validate `enabled` as boolean values."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': valid_enabled}
+ self.update_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_update_fails_with_invalid_interface(self):
+ """Exception raised when invalid `interface` on endpoint update."""
+ request_to_validate = {'interface': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_update_request_succeeds(self):
+ """Test that we validate an endpoint update request."""
+ request_to_validate = {'enabled': True,
+ 'interface': 'admin',
+ 'region_id': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+ self.update_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_update_fails_with_no_parameters(self):
+ """Exception raised when no parameters on endpoint update."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_update_succeeds_with_extra_parameters(self):
+ """Test that extra parameters pass validation on update endpoint."""
+ request_to_validate = {'enabled': True,
+ 'interface': 'admin',
+ 'region_id': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/',
+ 'other_attr': uuid.uuid4().hex}
+ self.update_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_update_succeeds_with_url(self):
+ """Validate `url` attribute in endpoint update request."""
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'interface': 'public'}
+ for url in _VALID_URLS:
+ request_to_validate['url'] = url
+ self.update_endpoint_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_update_fails_with_invalid_url(self):
+ """Exception raised when passing invalid `url` in request."""
+ request_to_validate = {'service_id': uuid.uuid4().hex,
+ 'interface': 'public'}
+ for url in _INVALID_URLS:
+ request_to_validate['url'] = url
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_validator.validate,
+ request_to_validate)
+
+
+class EndpointGroupValidationTestCase(testtools.TestCase):
+ """Test for V3 Endpoint Group API validation."""
+
+ def setUp(self):
+ super(EndpointGroupValidationTestCase, self).setUp()
+
+ create = endpoint_filter_schema.endpoint_group_create
+ update = endpoint_filter_schema.endpoint_group_update
+ self.create_endpoint_grp_validator = validators.SchemaValidator(create)
+ self.update_endpoint_grp_validator = validators.SchemaValidator(update)
+
+ def test_validate_endpoint_group_request_succeeds(self):
+ """Test that we validate an endpoint group request."""
+ request_to_validate = {'description': 'endpoint group description',
+ 'filters': {'interface': 'admin'},
+ 'name': 'endpoint_group_name'}
+ self.create_endpoint_grp_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_group_create_succeeds_with_req_parameters(self):
+ """Validate required endpoint group parameters.
+
+ This test ensures that validation succeeds with only the required
+ parameters passed for creating an endpoint group.
+ """
+ request_to_validate = {'filters': {'interface': 'admin'},
+ 'name': 'endpoint_group_name'}
+ self.create_endpoint_grp_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_group_create_succeeds_with_valid_filters(self):
+ """Validate dict values as `filters` in endpoint group create requests.
+ """
+ request_to_validate = {'description': 'endpoint group description',
+ 'name': 'endpoint_group_name'}
+ for valid_filters in _VALID_FILTERS:
+ request_to_validate['filters'] = valid_filters
+ self.create_endpoint_grp_validator.validate(request_to_validate)
+
+ def test_validate_create_endpoint_group_fails_with_invalid_filters(self):
+ """Validate invalid `filters` value in endpoint group parameters.
+
+ This test ensures that an exception is raised when a non-dict value
+ is used as `filters` in an endpoint group create request.
+ """
+ request_to_validate = {'description': 'endpoint group description',
+ 'name': 'endpoint_group_name'}
+ for invalid_filters in _INVALID_FILTERS:
+ request_to_validate['filters'] = invalid_filters
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_grp_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_group_create_fails_without_name(self):
+ """Exception raised when `name` isn't in endpoint group request."""
+ request_to_validate = {'description': 'endpoint group description',
+ 'filters': {'interface': 'admin'}}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_grp_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_group_create_fails_without_filters(self):
+ """Exception raised when `filters` isn't in endpoint group request."""
+ request_to_validate = {'description': 'endpoint group description',
+ 'name': 'endpoint_group_name'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_grp_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_group_update_request_succeeds(self):
+ """Test that we validate an endpoint group update request."""
+ request_to_validate = {'description': 'endpoint group description',
+ 'filters': {'interface': 'admin'},
+ 'name': 'endpoint_group_name'}
+ self.update_endpoint_grp_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_group_update_fails_with_no_parameters(self):
+ """Exception raised when no parameters on endpoint group update."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_grp_validator.validate,
+ request_to_validate)
+
+ def test_validate_endpoint_group_update_succeeds_with_name(self):
+ """Validate request with only `name` in endpoint group update.
+
+        This test ensures that passing only a `name` passes validation
+        on an endpoint group update request.
+ """
+ request_to_validate = {'name': 'endpoint_group_name'}
+ self.update_endpoint_grp_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_group_update_succeeds_with_valid_filters(self):
+ """Validate `filters` as dict values."""
+ for valid_filters in _VALID_FILTERS:
+ request_to_validate = {'filters': valid_filters}
+ self.update_endpoint_grp_validator.validate(request_to_validate)
+
+ def test_validate_endpoint_group_update_fails_with_invalid_filters(self):
+ """Exception raised when passing invalid `filters` in request."""
+ for invalid_filters in _INVALID_FILTERS:
+ request_to_validate = {'filters': invalid_filters}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_grp_validator.validate,
+ request_to_validate)
+
+
+class TrustValidationTestCase(testtools.TestCase):
+ """Test for V3 Trust API validation."""
+
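+    # Roles may be referenced by name ('member') or by ID; the schema
+    # accepts strings and rejects non-string values.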
+ _valid_roles = ['member', uuid.uuid4().hex, str(uuid.uuid4())]
+ _invalid_roles = [False, True, 123, None]
+
+ def setUp(self):
+ super(TrustValidationTestCase, self).setUp()
+
+ create = trust_schema.trust_create
+ self.create_trust_validator = validators.SchemaValidator(create)
+
+ def test_validate_trust_succeeds(self):
+ """Test that we can validate a trust request."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False}
+ self.create_trust_validator.validate(request_to_validate)
+
+ def test_validate_trust_with_all_parameters_succeeds(self):
+ """Test that we can validate a trust request with all parameters."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'project_id': uuid.uuid4().hex,
+ 'roles': [uuid.uuid4().hex, uuid.uuid4().hex],
+ 'expires_at': 'some timestamp',
+ 'remaining_uses': 2}
+ self.create_trust_validator.validate(request_to_validate)
+
+ def test_validate_trust_without_trustor_id_fails(self):
+ """Validate trust request fails without `trustor_id`."""
+ request_to_validate = {'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_trust_validator.validate,
+ request_to_validate)
+
+ def test_validate_trust_without_trustee_id_fails(self):
+ """Validate trust request fails without `trustee_id`."""
+        request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'impersonation': False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_trust_validator.validate,
+ request_to_validate)
+
+ def test_validate_trust_without_impersonation_fails(self):
+ """Validate trust request fails without `impersonation`."""
+ request_to_validate = {'trustee_user_id': uuid.uuid4().hex,
+ 'trustor_user_id': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_trust_validator.validate,
+ request_to_validate)
+
+ def test_validate_trust_with_extra_parameters_succeeds(self):
+ """Test that we can validate a trust request with extra parameters."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'project_id': uuid.uuid4().hex,
+ 'roles': [uuid.uuid4().hex, uuid.uuid4().hex],
+ 'expires_at': 'some timestamp',
+ 'remaining_uses': 2,
+ 'extra': 'something extra!'}
+ self.create_trust_validator.validate(request_to_validate)
+
+ def test_validate_trust_with_invalid_impersonation_fails(self):
+ """Validate trust request with invalid `impersonation` fails."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': 2}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_trust_validator.validate,
+ request_to_validate)
+
+ def test_validate_trust_with_null_remaining_uses_succeeds(self):
+ """Validate trust request with null `remaining_uses`."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'remaining_uses': None}
+ self.create_trust_validator.validate(request_to_validate)
+
+ def test_validate_trust_with_remaining_uses_succeeds(self):
+ """Validate trust request with `remaining_uses` succeeds."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'remaining_uses': 2}
+ self.create_trust_validator.validate(request_to_validate)
+
+ def test_validate_trust_with_invalid_expires_at_fails(self):
+ """Validate trust request with invalid `expires_at` fails."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'expires_at': 3}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_trust_validator.validate,
+ request_to_validate)
+
+ def test_validate_trust_with_role_types_succeeds(self):
+ """Validate trust request with `roles` succeeds."""
+ for role in self._valid_roles:
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'roles': [role]}
+ self.create_trust_validator.validate(request_to_validate)
+
+ def test_validate_trust_with_invalid_role_type_fails(self):
+ """Validate trust request with invalid `roles` fails."""
+ for role in self._invalid_roles:
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'roles': role}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_trust_validator.validate,
+ request_to_validate)
+
+ def test_validate_trust_with_list_of_valid_roles_succeeds(self):
+ """Validate trust request with a list of valid `roles`."""
+ request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
+ 'trustee_user_id': uuid.uuid4().hex,
+ 'impersonation': False,
+ 'roles': self._valid_roles}
+ self.create_trust_validator.validate(request_to_validate)
+
+
+class ServiceProviderValidationTestCase(testtools.TestCase):
+ """Test for V3 Service Provider API validation."""
+
+ def setUp(self):
+ super(ServiceProviderValidationTestCase, self).setUp()
+
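+        # Build unique, well-formed https URLs; the schema rejects
+        # plain strings, as the invalid-URL tests below show.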
+ self.valid_auth_url = 'https://' + uuid.uuid4().hex + '.com'
+ self.valid_sp_url = 'https://' + uuid.uuid4().hex + '.com'
+
+ create = federation_schema.service_provider_create
+ update = federation_schema.service_provider_update
+ self.create_sp_validator = validators.SchemaValidator(create)
+ self.update_sp_validator = validators.SchemaValidator(update)
+
+ def test_validate_sp_request(self):
+ """Test that we validate `auth_url` and `sp_url` in request."""
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': self.valid_sp_url
+ }
+ self.create_sp_validator.validate(request_to_validate)
+
+ def test_validate_sp_request_with_invalid_auth_url_fails(self):
+ """Validate request fails with invalid `auth_url`."""
+ request_to_validate = {
+ 'auth_url': uuid.uuid4().hex,
+ 'sp_url': self.valid_sp_url
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_request_with_invalid_sp_url_fails(self):
+ """Validate request fails with invalid `sp_url`."""
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_request_without_auth_url_fails(self):
+ """Validate request fails without `auth_url`."""
+ request_to_validate = {
+ 'sp_url': self.valid_sp_url
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+ request_to_validate = {
+ 'auth_url': None,
+ 'sp_url': self.valid_sp_url
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_request_without_sp_url_fails(self):
+ """Validate request fails without `sp_url`."""
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': None,
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_request_with_enabled(self):
+ """Validate `enabled` as boolean-like values."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': self.valid_sp_url,
+ 'enabled': valid_enabled
+ }
+ self.create_sp_validator.validate(request_to_validate)
+
+ def test_validate_sp_request_with_invalid_enabled_fails(self):
+ """Exception is raised when `enabled` isn't a boolean-like value."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': self.valid_sp_url,
+ 'enabled': invalid_enabled
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_request_with_valid_description(self):
+ """Test that we validate `description` in create requests."""
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': self.valid_sp_url,
+ 'description': 'My Service Provider'
+ }
+ self.create_sp_validator.validate(request_to_validate)
+
+ def test_validate_sp_request_with_invalid_description_fails(self):
+ """Exception is raised when `description` as a non-string value."""
+ request_to_validate = {
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': self.valid_sp_url,
+ 'description': False
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_request_with_extra_field_fails(self):
+ """Exception raised when passing extra fields in the body."""
+ # 'id' can't be passed in the body since it is passed in the URL
+ request_to_validate = {
+ 'id': 'ACME',
+ 'auth_url': self.valid_auth_url,
+ 'sp_url': self.valid_sp_url,
+ 'description': 'My Service Provider'
+ }
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_update_request(self):
+ """Test that we validate a update request."""
+ request_to_validate = {'description': uuid.uuid4().hex}
+ self.update_sp_validator.validate(request_to_validate)
+
+ def test_validate_sp_update_request_with_no_parameters_fails(self):
+ """Exception is raised when updating without parameters."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_update_request_with_invalid_auth_url_fails(self):
+ """Exception raised when updating with invalid `auth_url`."""
+ request_to_validate = {'auth_url': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_sp_validator.validate,
+ request_to_validate)
+ request_to_validate = {'auth_url': None}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_sp_validator.validate,
+ request_to_validate)
+
+ def test_validate_sp_update_request_with_invalid_sp_url_fails(self):
+ """Exception raised when updating with invalid `sp_url`."""
+ request_to_validate = {'sp_url': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_sp_validator.validate,
+ request_to_validate)
+ request_to_validate = {'sp_url': None}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_sp_validator.validate,
+ request_to_validate)
diff --git a/keystone-moon/keystone/tests/unit/test_versions.py b/keystone-moon/keystone/tests/unit/test_versions.py
new file mode 100644
index 00000000..6fe692ad
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_versions.py
@@ -0,0 +1,1051 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import functools
+import random
+
+import mock
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+from testtools import matchers as tt_matchers
+
+from keystone.common import json_home
+from keystone import controllers
+from keystone.tests import unit as tests
+
+
+CONF = cfg.CONF
+
+v2_MEDIA_TYPES = [
+ {
+ "base": "application/json",
+ "type": "application/"
+ "vnd.openstack.identity-v2.0+json"
+ }
+]
+
+v2_HTML_DESCRIPTION = {
+ "rel": "describedby",
+ "type": "text/html",
+ "href": "http://docs.openstack.org/"
+}
+
+
+v2_EXPECTED_RESPONSE = {
+ "id": "v2.0",
+ "status": "stable",
+ "updated": "2014-04-17T00:00:00Z",
+ "links": [
+ {
+ "rel": "self",
+ "href": "", # Will get filled in after initialization
+ },
+ v2_HTML_DESCRIPTION
+ ],
+ "media-types": v2_MEDIA_TYPES
+}
+
+v2_VERSION_RESPONSE = {
+ "version": v2_EXPECTED_RESPONSE
+}
+
+v3_MEDIA_TYPES = [
+ {
+ "base": "application/json",
+ "type": "application/"
+ "vnd.openstack.identity-v3+json"
+ }
+]
+
+v3_EXPECTED_RESPONSE = {
+ "id": "v3.0",
+ "status": "stable",
+ "updated": "2013-03-06T00:00:00Z",
+ "links": [
+ {
+ "rel": "self",
+ "href": "", # Will get filled in after initialization
+ }
+ ],
+ "media-types": v3_MEDIA_TYPES
+}
+
+v3_VERSION_RESPONSE = {
+ "version": v3_EXPECTED_RESPONSE
+}
+
+VERSIONS_RESPONSE = {
+ "versions": {
+ "values": [
+ v3_EXPECTED_RESPONSE,
+ v2_EXPECTED_RESPONSE
+ ]
+ }
+}
+
+_build_ec2tokens_relation = functools.partial(
+ json_home.build_v3_extension_resource_relation, extension_name='OS-EC2',
+ extension_version='1.0')
+
+REVOCATIONS_RELATION = json_home.build_v3_extension_resource_relation(
+ 'OS-PKI', '1.0', 'revocations')
+
+_build_simple_cert_relation = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-SIMPLE-CERT', extension_version='1.0')
+
+_build_trust_relation = functools.partial(
+ json_home.build_v3_extension_resource_relation, extension_name='OS-TRUST',
+ extension_version='1.0')
+
+_build_federation_rel = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-FEDERATION',
+ extension_version='1.0')
+
+_build_oauth1_rel = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-OAUTH1', extension_version='1.0')
+
+_build_ep_policy_rel = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-ENDPOINT-POLICY', extension_version='1.0')
+
+_build_ep_filter_rel = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-EP-FILTER', extension_version='1.0')
+
+TRUST_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
+ 'OS-TRUST', '1.0', 'trust_id')
+
+IDP_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
+ 'OS-FEDERATION', '1.0', 'idp_id')
+
+PROTOCOL_ID_PARAM_RELATION = json_home.build_v3_extension_parameter_relation(
+ 'OS-FEDERATION', '1.0', 'protocol_id')
+
+MAPPING_ID_PARAM_RELATION = json_home.build_v3_extension_parameter_relation(
+ 'OS-FEDERATION', '1.0', 'mapping_id')
+
+SP_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
+ 'OS-FEDERATION', '1.0', 'sp_id')
+
+CONSUMER_ID_PARAMETER_RELATION = (
+ json_home.build_v3_extension_parameter_relation(
+ 'OS-OAUTH1', '1.0', 'consumer_id'))
+
+REQUEST_TOKEN_ID_PARAMETER_RELATION = (
+ json_home.build_v3_extension_parameter_relation(
+ 'OS-OAUTH1', '1.0', 'request_token_id'))
+
+ACCESS_TOKEN_ID_PARAMETER_RELATION = (
+ json_home.build_v3_extension_parameter_relation(
+ 'OS-OAUTH1', '1.0', 'access_token_id'))
+
+ENDPOINT_GROUP_ID_PARAMETER_RELATION = (
+ json_home.build_v3_extension_parameter_relation(
+ 'OS-EP-FILTER', '1.0', 'endpoint_group_id'))
+
+BASE_IDP_PROTOCOL = '/OS-FEDERATION/identity_providers/{idp_id}/protocols'
+BASE_EP_POLICY = '/policies/{policy_id}/OS-ENDPOINT-POLICY'
+BASE_EP_FILTER = '/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
+BASE_ACCESS_TOKEN = (
+ '/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}')
+
+# TODO(stevemar): Use BASE_IDP_PROTOCOL when bug 1420125 is resolved.
+FEDERATED_AUTH_URL = ('/OS-FEDERATION/identity_providers/{identity_provider}'
+ '/protocols/{protocol}/auth')
+
+V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
+ json_home.build_v3_resource_relation('auth_tokens'): {
+ 'href': '/auth/tokens'},
+ json_home.build_v3_resource_relation('auth_catalog'): {
+ 'href': '/auth/catalog'},
+ json_home.build_v3_resource_relation('auth_projects'): {
+ 'href': '/auth/projects'},
+ json_home.build_v3_resource_relation('auth_domains'): {
+ 'href': '/auth/domains'},
+ json_home.build_v3_resource_relation('credential'): {
+ 'href-template': '/credentials/{credential_id}',
+ 'href-vars': {
+ 'credential_id':
+ json_home.build_v3_parameter_relation('credential_id')}},
+ json_home.build_v3_resource_relation('credentials'): {
+ 'href': '/credentials'},
+ json_home.build_v3_resource_relation('domain'): {
+ 'href-template': '/domains/{domain_id}',
+ 'href-vars': {'domain_id': json_home.Parameters.DOMAIN_ID, }},
+ json_home.build_v3_resource_relation('domain_group_role'): {
+ 'href-template':
+ '/domains/{domain_id}/groups/{group_id}/roles/{role_id}',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'role_id': json_home.Parameters.ROLE_ID, }},
+ json_home.build_v3_resource_relation('domain_group_roles'): {
+ 'href-template': '/domains/{domain_id}/groups/{group_id}/roles',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group_id': json_home.Parameters.GROUP_ID}},
+ json_home.build_v3_resource_relation('domain_user_role'): {
+ 'href-template':
+ '/domains/{domain_id}/users/{user_id}/roles/{role_id}',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('domain_user_roles'): {
+ 'href-template': '/domains/{domain_id}/users/{user_id}/roles',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('domains'): {'href': '/domains'},
+ json_home.build_v3_resource_relation('endpoint'): {
+ 'href-template': '/endpoints/{endpoint_id}',
+ 'href-vars': {
+ 'endpoint_id':
+ json_home.build_v3_parameter_relation('endpoint_id'), }},
+ json_home.build_v3_resource_relation('endpoints'): {
+ 'href': '/endpoints'},
+ _build_ec2tokens_relation(resource_name='ec2tokens'): {
+ 'href': '/ec2tokens'},
+ _build_ec2tokens_relation(resource_name='user_credential'): {
+ 'href-template': '/users/{user_id}/credentials/OS-EC2/{credential_id}',
+ 'href-vars': {
+ 'credential_id': json_home.build_v3_extension_parameter_relation(
+ 'OS-EC2', '1.0', 'credential_id'),
+ 'user_id': json_home.Parameters.USER_ID, }},
+ _build_ec2tokens_relation(resource_name='user_credentials'): {
+ 'href-template': '/users/{user_id}/credentials/OS-EC2',
+ 'href-vars': {
+ 'user_id': json_home.Parameters.USER_ID, }},
+ REVOCATIONS_RELATION: {
+ 'href': '/auth/tokens/OS-PKI/revoked'},
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/rel/'
+ 'events': {
+ 'href': '/OS-REVOKE/events'},
+ _build_simple_cert_relation(resource_name='ca_certificate'): {
+ 'href': '/OS-SIMPLE-CERT/ca'},
+ _build_simple_cert_relation(resource_name='certificates'): {
+ 'href': '/OS-SIMPLE-CERT/certificates'},
+ _build_trust_relation(resource_name='trust'):
+ {
+ 'href-template': '/OS-TRUST/trusts/{trust_id}',
+ 'href-vars': {'trust_id': TRUST_ID_PARAMETER_RELATION, }},
+ _build_trust_relation(resource_name='trust_role'): {
+ 'href-template': '/OS-TRUST/trusts/{trust_id}/roles/{role_id}',
+ 'href-vars': {
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'trust_id': TRUST_ID_PARAMETER_RELATION, }},
+ _build_trust_relation(resource_name='trust_roles'): {
+ 'href-template': '/OS-TRUST/trusts/{trust_id}/roles',
+ 'href-vars': {'trust_id': TRUST_ID_PARAMETER_RELATION, }},
+ _build_trust_relation(resource_name='trusts'): {
+ 'href': '/OS-TRUST/trusts'},
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/s3tokens/1.0/rel/'
+ 's3tokens': {
+ 'href': '/s3tokens'},
+ json_home.build_v3_resource_relation('group'): {
+ 'href-template': '/groups/{group_id}',
+ 'href-vars': {
+ 'group_id': json_home.Parameters.GROUP_ID, }},
+ json_home.build_v3_resource_relation('group_user'): {
+ 'href-template': '/groups/{group_id}/users/{user_id}',
+ 'href-vars': {
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('group_users'): {
+ 'href-template': '/groups/{group_id}/users',
+ 'href-vars': {'group_id': json_home.Parameters.GROUP_ID, }},
+ json_home.build_v3_resource_relation('groups'): {'href': '/groups'},
+ json_home.build_v3_resource_relation('policies'): {
+ 'href': '/policies'},
+ json_home.build_v3_resource_relation('policy'): {
+ 'href-template': '/policies/{policy_id}',
+ 'href-vars': {
+ 'policy_id':
+ json_home.build_v3_parameter_relation('policy_id'), }},
+ json_home.build_v3_resource_relation('project'): {
+ 'href-template': '/projects/{project_id}',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID, }},
+ json_home.build_v3_resource_relation('project_group_role'): {
+ 'href-template':
+ '/projects/{project_id}/groups/{group_id}/roles/{role_id}',
+ 'href-vars': {
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'role_id': json_home.Parameters.ROLE_ID, }},
+ json_home.build_v3_resource_relation('project_group_roles'): {
+ 'href-template': '/projects/{project_id}/groups/{group_id}/roles',
+ 'href-vars': {
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'project_id': json_home.Parameters.PROJECT_ID, }},
+ json_home.build_v3_resource_relation('project_user_role'): {
+ 'href-template':
+ '/projects/{project_id}/users/{user_id}/roles/{role_id}',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('project_user_roles'): {
+ 'href-template': '/projects/{project_id}/users/{user_id}/roles',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('projects'): {
+ 'href': '/projects'},
+ json_home.build_v3_resource_relation('region'): {
+ 'href-template': '/regions/{region_id}',
+ 'href-vars': {
+ 'region_id':
+ json_home.build_v3_parameter_relation('region_id'), }},
+ json_home.build_v3_resource_relation('regions'): {'href': '/regions'},
+ json_home.build_v3_resource_relation('role'): {
+ 'href-template': '/roles/{role_id}',
+ 'href-vars': {
+ 'role_id': json_home.Parameters.ROLE_ID, }},
+ json_home.build_v3_resource_relation('role_assignments'): {
+ 'href': '/role_assignments'},
+ json_home.build_v3_resource_relation('roles'): {'href': '/roles'},
+ json_home.build_v3_resource_relation('service'): {
+ 'href-template': '/services/{service_id}',
+ 'href-vars': {
+ 'service_id':
+ json_home.build_v3_parameter_relation('service_id')}},
+ json_home.build_v3_resource_relation('services'): {
+ 'href': '/services'},
+ json_home.build_v3_resource_relation('user'): {
+ 'href-template': '/users/{user_id}',
+ 'href-vars': {
+ 'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('user_change_password'): {
+ 'href-template': '/users/{user_id}/password',
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('user_groups'): {
+ 'href-template': '/users/{user_id}/groups',
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('user_projects'): {
+ 'href-template': '/users/{user_id}/projects',
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
+ json_home.build_v3_resource_relation('users'): {'href': '/users'},
+ _build_federation_rel(resource_name='domains'): {
+ 'href': '/OS-FEDERATION/domains'},
+ _build_federation_rel(resource_name='websso'): {
+ 'href-template': '/auth/OS-FEDERATION/websso/{protocol_id}',
+ 'href-vars': {
+ 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
+ _build_federation_rel(resource_name='projects'): {
+ 'href': '/OS-FEDERATION/projects'},
+ _build_federation_rel(resource_name='saml2'): {
+ 'href': '/auth/OS-FEDERATION/saml2'},
+ _build_federation_rel(resource_name='metadata'): {
+ 'href': '/OS-FEDERATION/saml2/metadata'},
+ _build_federation_rel(resource_name='identity_providers'): {
+ 'href': '/OS-FEDERATION/identity_providers'},
+ _build_federation_rel(resource_name='service_providers'): {
+ 'href': '/OS-FEDERATION/service_providers'},
+ _build_federation_rel(resource_name='mappings'): {
+ 'href': '/OS-FEDERATION/mappings'},
+ _build_federation_rel(resource_name='identity_provider'):
+ {
+ 'href-template': '/OS-FEDERATION/identity_providers/{idp_id}',
+ 'href-vars': {'idp_id': IDP_ID_PARAMETER_RELATION, }},
+ _build_federation_rel(resource_name='service_provider'):
+ {
+ 'href-template': '/OS-FEDERATION/service_providers/{sp_id}',
+ 'href-vars': {'sp_id': SP_ID_PARAMETER_RELATION, }},
+ _build_federation_rel(resource_name='mapping'):
+ {
+ 'href-template': '/OS-FEDERATION/mappings/{mapping_id}',
+ 'href-vars': {'mapping_id': MAPPING_ID_PARAM_RELATION, }},
+ _build_federation_rel(resource_name='identity_provider_protocol'): {
+ 'href-template': BASE_IDP_PROTOCOL + '/{protocol_id}',
+ 'href-vars': {
+ 'idp_id': IDP_ID_PARAMETER_RELATION,
+ 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
+ _build_federation_rel(resource_name='identity_provider_protocols'): {
+ 'href-template': BASE_IDP_PROTOCOL,
+ 'href-vars': {
+ 'idp_id': IDP_ID_PARAMETER_RELATION}},
+ # TODO(stevemar): Update href-vars when bug 1420125 is resolved.
+ _build_federation_rel(resource_name='identity_provider_protocol_auth'): {
+ 'href-template': FEDERATED_AUTH_URL,
+ 'href-vars': {
+ 'identity_provider': IDP_ID_PARAMETER_RELATION,
+ 'protocol': PROTOCOL_ID_PARAM_RELATION, }},
+ _build_oauth1_rel(resource_name='access_tokens'): {
+ 'href': '/OS-OAUTH1/access_token'},
+ _build_oauth1_rel(resource_name='request_tokens'): {
+ 'href': '/OS-OAUTH1/request_token'},
+ _build_oauth1_rel(resource_name='consumers'): {
+ 'href': '/OS-OAUTH1/consumers'},
+ _build_oauth1_rel(resource_name='authorize_request_token'):
+ {
+ 'href-template': '/OS-OAUTH1/authorize/{request_token_id}',
+ 'href-vars': {'request_token_id':
+ REQUEST_TOKEN_ID_PARAMETER_RELATION, }},
+ _build_oauth1_rel(resource_name='consumer'):
+ {
+ 'href-template': '/OS-OAUTH1/consumers/{consumer_id}',
+ 'href-vars': {'consumer_id': CONSUMER_ID_PARAMETER_RELATION, }},
+ _build_oauth1_rel(resource_name='user_access_token'):
+ {
+ 'href-template': BASE_ACCESS_TOKEN,
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID,
+ 'access_token_id':
+ ACCESS_TOKEN_ID_PARAMETER_RELATION, }},
+ _build_oauth1_rel(resource_name='user_access_tokens'):
+ {
+ 'href-template': '/users/{user_id}/OS-OAUTH1/access_tokens',
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
+ _build_oauth1_rel(resource_name='user_access_token_role'):
+ {
+ 'href-template': BASE_ACCESS_TOKEN + '/roles/{role_id}',
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'access_token_id':
+ ACCESS_TOKEN_ID_PARAMETER_RELATION, }},
+ _build_oauth1_rel(resource_name='user_access_token_roles'):
+ {
+ 'href-template': BASE_ACCESS_TOKEN + '/roles',
+ 'href-vars': {'user_id': json_home.Parameters.USER_ID,
+ 'access_token_id':
+ ACCESS_TOKEN_ID_PARAMETER_RELATION, }},
+ _build_ep_policy_rel(resource_name='endpoint_policy'):
+ {
+ 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/policy',
+ 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, }},
+ _build_ep_policy_rel(resource_name='endpoint_policy_association'):
+ {
+ 'href-template': BASE_EP_POLICY + '/endpoints/{endpoint_id}',
+ 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID,
+ 'policy_id': json_home.Parameters.POLICY_ID, }},
+ _build_ep_policy_rel(resource_name='policy_endpoints'):
+ {
+ 'href-template': BASE_EP_POLICY + '/endpoints',
+ 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID, }},
+ _build_ep_policy_rel(
+ resource_name='region_and_service_policy_association'):
+ {
+ 'href-template': (BASE_EP_POLICY +
+ '/services/{service_id}/regions/{region_id}'),
+ 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID,
+ 'service_id': json_home.Parameters.SERVICE_ID,
+ 'region_id': json_home.Parameters.REGION_ID, }},
+ _build_ep_policy_rel(resource_name='service_policy_association'):
+ {
+ 'href-template': BASE_EP_POLICY + '/services/{service_id}',
+ 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID,
+ 'service_id': json_home.Parameters.SERVICE_ID, }},
+ _build_ep_filter_rel(resource_name='endpoint_group'):
+ {
+ 'href-template': '/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}',
+ 'href-vars': {'endpoint_group_id':
+ ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
+ _build_ep_filter_rel(
+ resource_name='endpoint_group_to_project_association'):
+ {
+ 'href-template': BASE_EP_FILTER + '/projects/{project_id}',
+ 'href-vars': {'endpoint_group_id':
+ ENDPOINT_GROUP_ID_PARAMETER_RELATION,
+ 'project_id': json_home.Parameters.PROJECT_ID, }},
+ _build_ep_filter_rel(resource_name='endpoint_groups'):
+ {'href': '/OS-EP-FILTER/endpoint_groups'},
+ _build_ep_filter_rel(resource_name='endpoint_projects'):
+ {
+ 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects',
+ 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, }},
+ _build_ep_filter_rel(resource_name='endpoints_in_endpoint_group'):
+ {
+ 'href-template': BASE_EP_FILTER + '/endpoints',
+ 'href-vars': {'endpoint_group_id':
+ ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
+ _build_ep_filter_rel(resource_name='project_endpoint'):
+ {
+ 'href-template': ('/OS-EP-FILTER/projects/{project_id}'
+ '/endpoints/{endpoint_id}'),
+ 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID,
+ 'project_id': json_home.Parameters.PROJECT_ID, }},
+ _build_ep_filter_rel(resource_name='project_endpoints'):
+ {
+ 'href-template': '/OS-EP-FILTER/projects/{project_id}/endpoints',
+ 'href-vars': {'project_id': json_home.Parameters.PROJECT_ID, }},
+ _build_ep_filter_rel(
+ resource_name='projects_associated_with_endpoint_group'):
+ {
+ 'href-template': BASE_EP_FILTER + '/projects',
+ 'href-vars': {'endpoint_group_id':
+ ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
+ json_home.build_v3_resource_relation('domain_config'): {
+ 'href-template':
+ '/domains/{domain_id}/config',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID},
+ 'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('domain_config_group'): {
+ 'href-template':
+ '/domains/{domain_id}/config/{group}',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group': json_home.build_v3_parameter_relation('config_group')},
+ 'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('domain_config_option'): {
+ 'href-template':
+ '/domains/{domain_id}/config/{group}/{option}',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group': json_home.build_v3_parameter_relation('config_group'),
+ 'option': json_home.build_v3_parameter_relation('config_option')},
+ 'hints': {'status': 'experimental'}},
+}
+
+
+# With os-inherit enabled, there are some additional resources.
+
+build_os_inherit_relation = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-INHERIT', extension_version='1.0')
+
+V3_JSON_HOME_RESOURCES_INHERIT_ENABLED = dict(
+ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED)
+V3_JSON_HOME_RESOURCES_INHERIT_ENABLED.update(
+ (
+ (
+ build_os_inherit_relation(
+ resource_name='domain_user_role_inherited_to_projects'),
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
+ '{user_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'user_id': json_home.Parameters.USER_ID,
+ },
+ }
+ ),
+ (
+ build_os_inherit_relation(
+ resource_name='domain_group_role_inherited_to_projects'),
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
+ '{group_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ },
+ }
+ ),
+ (
+ build_os_inherit_relation(
+ resource_name='domain_user_roles_inherited_to_projects'),
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
+ '{user_id}/roles/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'user_id': json_home.Parameters.USER_ID,
+ },
+ }
+ ),
+ (
+ build_os_inherit_relation(
+ resource_name='domain_group_roles_inherited_to_projects'),
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
+ '{group_id}/roles/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group_id': json_home.Parameters.GROUP_ID,
+ },
+ }
+ ),
+ (
+ build_os_inherit_relation(
+ resource_name='project_user_role_inherited_to_projects'),
+ {
+ 'href-template': '/OS-INHERIT/projects/{project_id}/users/'
+ '{user_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'user_id': json_home.Parameters.USER_ID,
+ },
+ }
+ ),
+ (
+ build_os_inherit_relation(
+ resource_name='project_group_role_inherited_to_projects'),
+ {
+ 'href-template': '/OS-INHERIT/projects/{project_id}/groups/'
+ '{group_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ },
+ }
+ ),
+ )
+)
+
+
+class _VersionsEqual(tt_matchers.MatchesListwise):
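+    # Matches a versions document without relying on the order of the
+    # version entries: keys must match and every expected entry must
+    # be present.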
+ def __init__(self, expected):
+ super(_VersionsEqual, self).__init__([
+ tt_matchers.KeysEqual(expected),
+ tt_matchers.KeysEqual(expected['versions']),
+ tt_matchers.HasLength(len(expected['versions']['values'])),
+ tt_matchers.ContainsAll(expected['versions']['values']),
+ ])
+
+ def match(self, other):
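+        # 'values' is passed twice so that both the HasLength and the
+        # ContainsAll matchers declared in __init__ apply to it.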
+ return super(_VersionsEqual, self).match([
+ other,
+ other['versions'],
+ other['versions']['values'],
+ other['versions']['values'],
+ ])
+
+
+class VersionTestCase(tests.TestCase):
+ def setUp(self):
+ super(VersionTestCase, self).setUp()
+ self.load_backends()
+ self.public_app = self.loadapp('keystone', 'main')
+ self.admin_app = self.loadapp('keystone', 'admin')
+
+ self.config_fixture.config(
+ public_endpoint='http://localhost:%(public_port)d',
+ admin_endpoint='http://localhost:%(admin_port)d')
+
+ def config_overrides(self):
+ super(VersionTestCase, self).config_overrides()
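+        # A single random port serves both endpoints; the tests rebuild
+        # the expected version links from these same config options.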
+ port = random.randint(10000, 30000)
+ self.config_fixture.config(group='eventlet_server', public_port=port,
+ admin_port=port)
+
+ def _paste_in_port(self, response, port):
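+        # Fill in the empty 'self' link from the expected-response
+        # templates ('port' is actually the full URL here).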
+ for link in response['links']:
+ if link['rel'] == 'self':
+ link['href'] = port
+
+ def test_public_versions(self):
+ client = tests.TestClient(self.public_app)
+ resp = client.get('/')
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ if version['id'] == 'v3.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.public_port)
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.public_port)
+ self.assertThat(data, _VersionsEqual(expected))
+
+ def test_admin_versions(self):
+ client = tests.TestClient(self.admin_app)
+ resp = client.get('/')
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ if version['id'] == 'v3.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.admin_port)
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.admin_port)
+ self.assertThat(data, _VersionsEqual(expected))
+
+ def test_use_site_url_if_endpoint_unset(self):
+ self.config_fixture.config(public_endpoint=None, admin_endpoint=None)
+
+ for app in (self.public_app, self.admin_app):
+ client = tests.TestClient(app)
+ resp = client.get('/')
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ # localhost happens to be the site url for tests
+ if version['id'] == 'v3.0':
+ self._paste_in_port(
+ version, 'http://localhost/v3/')
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(
+ version, 'http://localhost/v2.0/')
+ self.assertThat(data, _VersionsEqual(expected))
+
+ def test_public_version_v2(self):
+ client = tests.TestClient(self.public_app)
+ resp = client.get('/v2.0/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.public_port)
+ self.assertEqual(expected, data)
+
+ def test_admin_version_v2(self):
+ client = tests.TestClient(self.admin_app)
+ resp = client.get('/v2.0/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.admin_port)
+ self.assertEqual(expected, data)
+
+ def test_use_site_url_if_endpoint_unset_v2(self):
+ self.config_fixture.config(public_endpoint=None, admin_endpoint=None)
+ for app in (self.public_app, self.admin_app):
+ client = tests.TestClient(app)
+ resp = client.get('/v2.0/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'], 'http://localhost/v2.0/')
+            self.assertEqual(expected, data)
+
+ def test_public_version_v3(self):
+ client = tests.TestClient(self.public_app)
+ resp = client.get('/v3/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.public_port)
+ self.assertEqual(expected, data)
+
+ def test_admin_version_v3(self):
+        client = tests.TestClient(self.admin_app)
+ resp = client.get('/v3/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.admin_port)
+ self.assertEqual(expected, data)
+
+ def test_use_site_url_if_endpoint_unset_v3(self):
+ self.config_fixture.config(public_endpoint=None, admin_endpoint=None)
+ for app in (self.public_app, self.admin_app):
+ client = tests.TestClient(app)
+ resp = client.get('/v3/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'], 'http://localhost/v3/')
+ self.assertEqual(expected, data)
+
+ @mock.patch.object(controllers, '_VERSIONS', ['v3'])
+ def test_v2_disabled(self):
+ client = tests.TestClient(self.public_app)
+ # request to /v2.0 should fail
+ resp = client.get('/v2.0/')
+ self.assertEqual(404, resp.status_int)
+
+ # request to /v3 should pass
+ resp = client.get('/v3/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.public_port)
+ self.assertEqual(expected, data)
+
+ # only v3 information should be displayed by requests to /
+ v3_only_response = {
+ "versions": {
+ "values": [
+ v3_EXPECTED_RESPONSE
+ ]
+ }
+ }
+ self._paste_in_port(v3_only_response['versions']['values'][0],
+ 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.public_port)
+ resp = client.get('/')
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ self.assertEqual(v3_only_response, data)
+
+ @mock.patch.object(controllers, '_VERSIONS', ['v2.0'])
+ def test_v3_disabled(self):
+ client = tests.TestClient(self.public_app)
+ # request to /v3 should fail
+ resp = client.get('/v3/')
+ self.assertEqual(404, resp.status_int)
+
+ # request to /v2.0 should pass
+ resp = client.get('/v2.0/')
+ self.assertEqual(200, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.public_port)
+ self.assertEqual(expected, data)
+
+ # only v2 information should be displayed by requests to /
+ v2_only_response = {
+ "versions": {
+ "values": [
+ v2_EXPECTED_RESPONSE
+ ]
+ }
+ }
+ self._paste_in_port(v2_only_response['versions']['values'][0],
+ 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.public_port)
+ resp = client.get('/')
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ self.assertEqual(v2_only_response, data)
+
+ def _test_json_home(self, path, exp_json_home_data):
+ client = tests.TestClient(self.public_app)
+ resp = client.get(path, headers={'Accept': 'application/json-home'})
+
+ self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
+ self.assertThat(resp.headers['Content-Type'],
+ tt_matchers.Equals('application/json-home'))
+
+ self.assertThat(jsonutils.loads(resp.body),
+ tt_matchers.Equals(exp_json_home_data))
+
+ def test_json_home_v3(self):
+ # If the request is /v3 and the Accept header is application/json-home
+ # then the server responds with a JSON Home document.
+
+ exp_json_home_data = {
+ 'resources': V3_JSON_HOME_RESOURCES_INHERIT_DISABLED}
+
+ self._test_json_home('/v3', exp_json_home_data)
+
+ def test_json_home_root(self):
+ # If the request is / and the Accept header is application/json-home
+ # then the server responds with a JSON Home document.
+
+ exp_json_home_data = copy.deepcopy({
+ 'resources': V3_JSON_HOME_RESOURCES_INHERIT_DISABLED})
+ json_home.translate_urls(exp_json_home_data, '/v3')
+
+ self._test_json_home('/', exp_json_home_data)
+
+ def test_accept_type_handling(self):
+ # Accept headers with multiple types and qvalues are handled.
+
+ def make_request(accept_types=None):
+ client = tests.TestClient(self.public_app)
+ headers = None
+ if accept_types:
+ headers = {'Accept': accept_types}
+ resp = client.get('/v3', headers=headers)
+ self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
+ return resp.headers['Content-Type']
+
+ JSON = controllers.MimeTypes.JSON
+ JSON_HOME = controllers.MimeTypes.JSON_HOME
+
+ JSON_MATCHER = tt_matchers.Equals(JSON)
+ JSON_HOME_MATCHER = tt_matchers.Equals(JSON_HOME)
+
+ # Default is JSON.
+ self.assertThat(make_request(), JSON_MATCHER)
+
+ # Can request JSON and get JSON.
+ self.assertThat(make_request(JSON), JSON_MATCHER)
+
+        # Can request JSON Home and get JSON Home.
+ self.assertThat(make_request(JSON_HOME), JSON_HOME_MATCHER)
+
+        # Requesting 'JSON, JSON Home' with equal quality yields JSON.
+ accept_types = '%s, %s' % (JSON, JSON_HOME)
+ self.assertThat(make_request(accept_types), JSON_MATCHER)
+
+        # Requesting 'JSON Home, JSON' with equal quality also yields JSON.
+ accept_types = '%s, %s' % (JSON_HOME, JSON)
+ self.assertThat(make_request(accept_types), JSON_MATCHER)
+
+        # Requesting JSON Home with JSON at q=0.5 yields JSON Home.
+ accept_types = '%s, %s;q=0.5' % (JSON_HOME, JSON)
+ self.assertThat(make_request(accept_types), JSON_HOME_MATCHER)
+
+        # Requesting an unknown mime-type yields JSON.
+ self.assertThat(make_request(self.getUniqueString()), JSON_MATCHER)
+
+ @mock.patch.object(controllers, '_VERSIONS', [])
+ def test_no_json_home_document_returned_when_v3_disabled(self):
+ json_home_document = controllers.request_v3_json_home('some_prefix')
+ expected_document = {'resources': {}}
+ self.assertEqual(expected_document, json_home_document)
+
+ def test_extension_property_method_returns_none(self):
+ extension_obj = controllers.Extensions()
+ extensions_property = extension_obj.extensions
+ self.assertIsNone(extensions_property)
+
+
+class VersionSingleAppTestCase(tests.TestCase):
+ """Tests running with a single application loaded.
+
+ These are important because when Keystone is running in Apache httpd
+ there's only one application loaded for each instance.
+
+ """
+
+ def setUp(self):
+ super(VersionSingleAppTestCase, self).setUp()
+ self.load_backends()
+
+ self.config_fixture.config(
+ public_endpoint='http://localhost:%(public_port)d',
+ admin_endpoint='http://localhost:%(admin_port)d')
+
+ def config_overrides(self):
+ super(VersionSingleAppTestCase, self).config_overrides()
+ port = random.randint(10000, 30000)
+ self.config_fixture.config(group='eventlet_server', public_port=port,
+ admin_port=port)
+
+ def _paste_in_port(self, response, port):
+ for link in response['links']:
+ if link['rel'] == 'self':
+ link['href'] = port
+
+ def _test_version(self, app_name):
+ app = self.loadapp('keystone', app_name)
+ client = tests.TestClient(app)
+ resp = client.get('/')
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ if version['id'] == 'v3.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v3/' %
+ CONF.eventlet_server.public_port)
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v2.0/' %
+ CONF.eventlet_server.public_port)
+ self.assertThat(data, _VersionsEqual(expected))
+
+ def test_public(self):
+ self._test_version('main')
+
+ def test_admin(self):
+ self._test_version('admin')
+
+
+class VersionInheritEnabledTestCase(tests.TestCase):
+ def setUp(self):
+ super(VersionInheritEnabledTestCase, self).setUp()
+ self.load_backends()
+ self.public_app = self.loadapp('keystone', 'main')
+ self.admin_app = self.loadapp('keystone', 'admin')
+
+ self.config_fixture.config(
+ public_endpoint='http://localhost:%(public_port)d',
+ admin_endpoint='http://localhost:%(admin_port)d')
+
+ def config_overrides(self):
+ super(VersionInheritEnabledTestCase, self).config_overrides()
+ port = random.randint(10000, 30000)
+ self.config_fixture.config(group='eventlet_server', public_port=port,
+ admin_port=port)
+
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ def test_json_home_v3(self):
+ # If the request is /v3 and the Accept header is application/json-home
+ # then the server responds with a JSON Home document.
+
+ client = tests.TestClient(self.public_app)
+ resp = client.get('/v3/', headers={'Accept': 'application/json-home'})
+
+ self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
+ self.assertThat(resp.headers['Content-Type'],
+ tt_matchers.Equals('application/json-home'))
+
+ exp_json_home_data = {
+ 'resources': V3_JSON_HOME_RESOURCES_INHERIT_ENABLED}
+
+ self.assertThat(jsonutils.loads(resp.body),
+ tt_matchers.Equals(exp_json_home_data))
+
+
+class VersionBehindSslTestCase(tests.TestCase):
+ def setUp(self):
+ super(VersionBehindSslTestCase, self).setUp()
+ self.load_backends()
+ self.public_app = self.loadapp('keystone', 'main')
+
+ def config_overrides(self):
+ super(VersionBehindSslTestCase, self).config_overrides()
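+        # Trust X-Forwarded-Proto from the proxy so that generated
+        # version links reflect the client-facing https scheme.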
+ self.config_fixture.config(
+ secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO')
+
+ def _paste_in_port(self, response, port):
+ for link in response['links']:
+ if link['rel'] == 'self':
+ link['href'] = port
+
+ def _get_expected(self, host):
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ if version['id'] == 'v3.0':
+ self._paste_in_port(version, host + 'v3/')
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(version, host + 'v2.0/')
+ return expected
+
+ def test_versions_without_headers(self):
+ client = tests.TestClient(self.public_app)
+ host_name = 'host-%d' % random.randint(10, 30)
+ host_port = random.randint(10000, 30000)
+ host = 'http://%s:%s/' % (host_name, host_port)
+ resp = client.get(host)
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = self._get_expected(host)
+ self.assertThat(data, _VersionsEqual(expected))
+
+ def test_versions_with_header(self):
+ client = tests.TestClient(self.public_app)
+ host_name = 'host-%d' % random.randint(10, 30)
+ host_port = random.randint(10000, 30000)
+ resp = client.get('http://%s:%s/' % (host_name, host_port),
+ headers={'X-Forwarded-Proto': 'https'})
+ self.assertEqual(300, resp.status_int)
+ data = jsonutils.loads(resp.body)
+ expected = self._get_expected('https://%s:%s/' % (host_name,
+ host_port))
+ self.assertThat(data, _VersionsEqual(expected))
diff --git a/keystone-moon/keystone/tests/unit/test_wsgi.py b/keystone-moon/keystone/tests/unit/test_wsgi.py
new file mode 100644
index 00000000..1785dd00
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_wsgi.py
@@ -0,0 +1,427 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import gettext
+import socket
+import uuid
+
+import mock
+import oslo_i18n
+from oslo_serialization import jsonutils
+import six
+from testtools import matchers
+import webob
+
+from keystone.common import environment
+from keystone.common import wsgi
+from keystone import exception
+from keystone.tests import unit as tests
+
+
+class FakeApp(wsgi.Application):
+ def index(self, context):
+ return {'a': 'b'}
+
+
+class FakeAttributeCheckerApp(wsgi.Application):
+ def index(self, context):
+ return context['query_string']
+
+ def assert_attribute(self, body, attr):
+ """Asserts that the given request has a certain attribute."""
+ ref = jsonutils.loads(body)
+ self._require_attribute(ref, attr)
+
+ def assert_attributes(self, body, attr):
+ """Asserts that the given request has a certain set attributes."""
+ ref = jsonutils.loads(body)
+ self._require_attributes(ref, attr)
+
+
+class BaseWSGITest(tests.TestCase):
+ def setUp(self):
+ self.app = FakeApp()
+ super(BaseWSGITest, self).setUp()
+
+ def _make_request(self, url='/'):
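+        # Inject wsgiorg.routing_args directly, standing in for the
+        # Routes middleware, so wsgi.Application dispatches to index().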
+ req = webob.Request.blank(url)
+ args = {'action': 'index', 'controller': None}
+ req.environ['wsgiorg.routing_args'] = [None, args]
+ return req
+
+
+class ApplicationTest(BaseWSGITest):
+ def test_response_content_type(self):
+ req = self._make_request()
+ resp = req.get_response(self.app)
+        self.assertEqual('application/json', resp.content_type)
+
+ def test_query_string_available(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return context['query_string']
+ req = self._make_request(url='/?1=2')
+ resp = req.get_response(FakeApp())
+        self.assertEqual({'1': '2'}, jsonutils.loads(resp.body))
+
+ def test_headers_available(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return context['headers']
+
+ app = FakeApp()
+ req = self._make_request(url='/?1=2')
+ req.headers['X-Foo'] = "bar"
+ resp = req.get_response(app)
+        self.assertIn('X-Foo', jsonutils.loads(resp.body))
+
+ def test_render_response(self):
+ data = {'attribute': 'value'}
+ body = b'{"attribute": "value"}'
+
+ resp = wsgi.render_response(body=data)
+ self.assertEqual('200 OK', resp.status)
+ self.assertEqual(200, resp.status_int)
+ self.assertEqual(body, resp.body)
+ self.assertEqual('X-Auth-Token', resp.headers.get('Vary'))
+ self.assertEqual(str(len(body)), resp.headers.get('Content-Length'))
+
+ def test_render_response_custom_status(self):
+ resp = wsgi.render_response(status=(501, 'Not Implemented'))
+ self.assertEqual('501 Not Implemented', resp.status)
+ self.assertEqual(501, resp.status_int)
+
+ def test_successful_require_attribute(self):
+ app = FakeAttributeCheckerApp()
+ req = self._make_request(url='/?1=2')
+ resp = req.get_response(app)
+ app.assert_attribute(resp.body, '1')
+
+ def test_require_attribute_fail_if_attribute_not_present(self):
+ app = FakeAttributeCheckerApp()
+ req = self._make_request(url='/?1=2')
+ resp = req.get_response(app)
+ self.assertRaises(exception.ValidationError,
+ app.assert_attribute, resp.body, 'a')
+
+ def test_successful_require_multiple_attributes(self):
+ app = FakeAttributeCheckerApp()
+ req = self._make_request(url='/?a=1&b=2')
+ resp = req.get_response(app)
+ app.assert_attributes(resp.body, ['a', 'b'])
+
+ def test_attribute_missing_from_request(self):
+ app = FakeAttributeCheckerApp()
+ req = self._make_request(url='/?a=1&b=2')
+ resp = req.get_response(app)
+ ex = self.assertRaises(exception.ValidationError,
+ app.assert_attributes,
+ resp.body, ['a', 'missing_attribute'])
+ self.assertThat(six.text_type(ex),
+ matchers.Contains('missing_attribute'))
+
+ def test_no_required_attributes_present(self):
+ app = FakeAttributeCheckerApp()
+ req = self._make_request(url='/')
+ resp = req.get_response(app)
+
+ ex = self.assertRaises(exception.ValidationError,
+ app.assert_attributes, resp.body,
+ ['missing_attribute1', 'missing_attribute2'])
+ self.assertThat(six.text_type(ex),
+ matchers.Contains('missing_attribute1'))
+ self.assertThat(six.text_type(ex),
+ matchers.Contains('missing_attribute2'))
+
+ def test_render_response_custom_headers(self):
+ resp = wsgi.render_response(headers=[('Custom-Header', 'Some-Value')])
+ self.assertEqual('Some-Value', resp.headers.get('Custom-Header'))
+ self.assertEqual('X-Auth-Token', resp.headers.get('Vary'))
+
+ def test_render_response_no_body(self):
+ resp = wsgi.render_response()
+ self.assertEqual('204 No Content', resp.status)
+ self.assertEqual(204, resp.status_int)
+ self.assertEqual(b'', resp.body)
+ self.assertEqual('0', resp.headers.get('Content-Length'))
+ self.assertIsNone(resp.headers.get('Content-Type'))
+
+ def test_render_response_head_with_body(self):
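+        # A HEAD response keeps the headers of the equivalent GET,
+        # including Content-Length, but carries no body.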
+ resp = wsgi.render_response({'id': uuid.uuid4().hex}, method='HEAD')
+ self.assertEqual(200, resp.status_int)
+ self.assertEqual(b'', resp.body)
+ self.assertNotEqual(resp.headers.get('Content-Length'), '0')
+ self.assertEqual('application/json', resp.headers.get('Content-Type'))
+
+ def test_application_local_config(self):
+ class FakeApp(wsgi.Application):
+ def __init__(self, *args, **kwargs):
+ self.kwargs = kwargs
+
+ app = FakeApp.factory({}, testkey="test")
+ self.assertIn("testkey", app.kwargs)
+ self.assertEqual("test", app.kwargs["testkey"])
+
+ def test_render_exception(self):
+ e = exception.Unauthorized(message=u'\u7f51\u7edc')
+ resp = wsgi.render_exception(e)
+ self.assertEqual(401, resp.status_int)
+
+ def test_render_exception_host(self):
+ e = exception.Unauthorized(message=u'\u7f51\u7edc')
+ context = {'host_url': 'http://%s:5000' % uuid.uuid4().hex}
+ resp = wsgi.render_exception(e, context=context)
+
+ self.assertEqual(401, resp.status_int)
+
+
+class ExtensionRouterTest(BaseWSGITest):
+ def test_extensionrouter_local_config(self):
+ class FakeRouter(wsgi.ExtensionRouter):
+ def __init__(self, *args, **kwargs):
+ self.kwargs = kwargs
+
+ factory = FakeRouter.factory({}, testkey="test")
+ app = factory(self.app)
+ self.assertIn("testkey", app.kwargs)
+ self.assertEqual("test", app.kwargs["testkey"])
+
+
+class MiddlewareTest(BaseWSGITest):
+ def test_middleware_request(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_request(self, req):
+ req.environ['fake_request'] = True
+ return req
+ req = self._make_request()
+ resp = FakeMiddleware(None)(req)
+ self.assertIn('fake_request', resp.environ)
+
+ def test_middleware_response(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ response.environ = {}
+ response.environ['fake_response'] = True
+ return response
+ req = self._make_request()
+ resp = FakeMiddleware(self.app)(req)
+ self.assertIn('fake_response', resp.environ)
+
+ def test_middleware_bad_request(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ raise exception.Unauthorized()
+
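+        # A keystone exception raised by middleware should surface as
+        # the matching HTTP error response rather than propagating.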
+ req = self._make_request()
+ req.environ['REMOTE_ADDR'] = '127.0.0.1'
+ resp = FakeMiddleware(self.app)(req)
+ self.assertEqual(exception.Unauthorized.code, resp.status_int)
+
+ def test_middleware_type_error(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ raise TypeError()
+
+ req = self._make_request()
+ req.environ['REMOTE_ADDR'] = '127.0.0.1'
+ resp = FakeMiddleware(self.app)(req)
+        # A TypeError raised in middleware is reported as a ValidationError.
+ self.assertEqual(exception.ValidationError.code, resp.status_int)
+
+ def test_middleware_exception_error(self):
+
+ exception_str = b'EXCEPTIONERROR'
+
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ raise exception.UnexpectedError(exception_str)
+
+ def do_request():
+ req = self._make_request()
+ resp = FakeMiddleware(self.app)(req)
+ self.assertEqual(exception.UnexpectedError.code, resp.status_int)
+ return resp
+
+ # Exception data should not be in the message when debug is False
+ self.config_fixture.config(debug=False)
+ self.assertNotIn(exception_str, do_request().body)
+
+ # Exception data should be in the message when debug is True
+ self.config_fixture.config(debug=True)
+ self.assertIn(exception_str, do_request().body)
+
+ def test_middleware_local_config(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def __init__(self, *args, **kwargs):
+ self.kwargs = kwargs
+
+ factory = FakeMiddleware.factory({}, testkey="test")
+ app = factory(self.app)
+ self.assertIn("testkey", app.kwargs)
+ self.assertEqual("test", app.kwargs["testkey"])
+
+
+class LocalizedResponseTest(tests.TestCase):
+ def test_request_match_default(self):
+        # With no Accept-Language header, best_match_language() returns None.
+ req = webob.Request.blank('/')
+ self.assertIsNone(wsgi.best_match_language(req))
+
+ @mock.patch.object(oslo_i18n, 'get_available_languages')
+ def test_request_match_language_expected(self, mock_gal):
+ # If Accept-Language is a supported language, best_match_language()
+ # returns it.
+
+ language = uuid.uuid4().hex
+ mock_gal.return_value = [language]
+
+ req = webob.Request.blank('/', headers={'Accept-Language': language})
+ self.assertEqual(language, wsgi.best_match_language(req))
+
+ @mock.patch.object(oslo_i18n, 'get_available_languages')
+ def test_request_match_language_unexpected(self, mock_gal):
+ # If Accept-Language is a language we do not support,
+ # best_match_language() returns None.
+
+ supported_language = uuid.uuid4().hex
+ mock_gal.return_value = [supported_language]
+
+ request_language = uuid.uuid4().hex
+ req = webob.Request.blank(
+ '/', headers={'Accept-Language': request_language})
+ self.assertIsNone(wsgi.best_match_language(req))
+
+ def test_static_translated_string_is_lazy_translatable(self):
+ # Statically created message strings are an object that can get
+ # lazy-translated rather than a regular string.
+ self.assertNotEqual(type(exception.Unauthorized.message_format),
+ six.text_type)
+
+ @mock.patch.object(oslo_i18n, 'get_available_languages')
+ def test_get_localized_response(self, mock_gal):
+ # If the request has the Accept-Language set to a supported language
+ # and an exception is raised by the application that is translatable
+ # then the response will have the translated message.
+
+ language = uuid.uuid4().hex
+ mock_gal.return_value = [language]
+
+        # The arguments for the translated message format have to match
+        # those of the chosen exception (exception.NotFound).
+ xlated_msg_fmt = "Xlated NotFound, %(target)s."
+
+ # Fake out gettext.translation() to return a translator for our
+ # expected language and a passthrough translator for other langs.
+
+ def fake_translation(*args, **kwargs):
+ class IdentityTranslator(object):
+ def ugettext(self, msgid):
+ return msgid
+
+ gettext = ugettext
+
+ class LangTranslator(object):
+ def ugettext(self, msgid):
+ if msgid == exception.NotFound.message_format:
+ return xlated_msg_fmt
+ return msgid
+
+ gettext = ugettext
+
+ if language in kwargs.get('languages', []):
+ return LangTranslator()
+ return IdentityTranslator()
+
+ with mock.patch.object(gettext, 'translation',
+ side_effect=fake_translation) as xlation_mock:
+ target = uuid.uuid4().hex
+
+            # The fake app raises NotFound to simulate Keystone raising
+            # an exception.
+
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ raise exception.NotFound(target=target)
+
+            # Make the request against the app with Accept-Language set
+            # and expect an error response carrying the translated message.
+
+ req = webob.Request.blank('/')
+ args = {'action': 'index', 'controller': None}
+ req.environ['wsgiorg.routing_args'] = [None, args]
+ req.headers['Accept-Language'] = language
+ resp = req.get_response(FakeApp())
+
+ # Assert that the translated message appears in the response.
+
+ exp_msg = xlated_msg_fmt % dict(target=target)
+ self.assertThat(resp.json['error']['message'],
+ matchers.Equals(exp_msg))
+ self.assertThat(xlation_mock.called, matchers.Equals(True))
+
+
+class ServerTest(tests.TestCase):
+
+ def setUp(self):
+ super(ServerTest, self).setUp()
+ self.host = '127.0.0.1'
+ self.port = '1234'
+
+ @mock.patch('eventlet.listen')
+ @mock.patch('socket.getaddrinfo')
+ def test_keepalive_unset(self, mock_getaddrinfo, mock_listen):
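+        # With keepalive left unset, no socket options should be applied
+        # to the duplicated listen socket.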
+ mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)]
+ mock_sock_dup = mock_listen.return_value.dup.return_value
+
+ server = environment.Server(mock.MagicMock(), host=self.host,
+ port=self.port)
+ server.start()
+ self.addCleanup(server.stop)
+ self.assertTrue(mock_listen.called)
+ self.assertFalse(mock_sock_dup.setsockopt.called)
+
+ @mock.patch('eventlet.listen')
+ @mock.patch('socket.getaddrinfo')
+ def test_keepalive_set(self, mock_getaddrinfo, mock_listen):
+ mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)]
+ mock_sock_dup = mock_listen.return_value.dup.return_value
+
+ server = environment.Server(mock.MagicMock(), host=self.host,
+ port=self.port, keepalive=True)
+ server.start()
+ self.addCleanup(server.stop)
+ mock_sock_dup.setsockopt.assert_called_once_with(socket.SOL_SOCKET,
+ socket.SO_KEEPALIVE,
+ 1)
+ self.assertTrue(mock_listen.called)
+
+ @mock.patch('eventlet.listen')
+ @mock.patch('socket.getaddrinfo')
+ def test_keepalive_and_keepidle_set(self, mock_getaddrinfo, mock_listen):
+ mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)]
+ mock_sock_dup = mock_listen.return_value.dup.return_value
+
+ server = environment.Server(mock.MagicMock(), host=self.host,
+ port=self.port, keepalive=True,
+ keepidle=1)
+ server.start()
+ self.addCleanup(server.stop)
+
+ self.assertEqual(2, mock_sock_dup.setsockopt.call_count)
+
+        # Check the most recent setsockopt() call, i.e. the keepidle option.
+ mock_sock_dup.setsockopt.assert_called_with(socket.IPPROTO_TCP,
+ socket.TCP_KEEPIDLE,
+ 1)
+
+ self.assertTrue(mock_listen.called)
diff --git a/keystone-moon/keystone/tests/unit/tests/__init__.py b/keystone-moon/keystone/tests/unit/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/tests/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/tests/test_core.py b/keystone-moon/keystone/tests/unit/tests/test_core.py
new file mode 100644
index 00000000..86c91a8d
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/tests/test_core.py
@@ -0,0 +1,62 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sys
+import warnings
+
+from oslo_log import log
+from sqlalchemy import exc
+from testtools import matchers
+
+from keystone.tests import unit as tests
+
+
+LOG = log.getLogger(__name__)
+
+
+class BaseTestTestCase(tests.BaseTestCase):
+
+ def test_unexpected_exit(self):
+        # If a test calls sys.exit(), an exception is raised instead of
+        # the process exiting.
+ self.assertThat(lambda: sys.exit(),
+ matchers.raises(tests.UnexpectedExit))
+
+
+class TestTestCase(tests.TestCase):
+
+ def test_bad_log(self):
+        # If the arguments do not match the format string in a log call,
+        # an exception is raised during testing.
+ self.assertThat(
+ lambda: LOG.warn('String %(p1)s %(p2)s', {'p1': 'something'}),
+ matchers.raises(tests.BadLog))
+
+ def test_sa_warning(self):
+ self.assertThat(
+ lambda: warnings.warn('test sa warning error', exc.SAWarning),
+ matchers.raises(exc.SAWarning))
+
+ def test_deprecations(self):
+        # Any deprecation warning emitted during testing is raised as an
+        # exception.
+
+ def use_deprecated():
+ # DeprecationWarning: BaseException.message has been deprecated as
+ # of Python 2.6
+ try:
+ raise Exception('something')
+ except Exception as e:
+ e.message
+
+ self.assertThat(use_deprecated, matchers.raises(DeprecationWarning))
diff --git a/keystone-moon/keystone/tests/unit/tests/test_utils.py b/keystone-moon/keystone/tests/unit/tests/test_utils.py
new file mode 100644
index 00000000..22c485c0
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/tests/test_utils.py
@@ -0,0 +1,37 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from testtools import matchers
+from testtools import testcase
+
+from keystone.tests.unit import utils
+
+
+class TestWipDecorator(testcase.TestCase):
+
+ def test_raises_SkipError_when_broken_test_fails(self):
+
+ @utils.wip('waiting on bug #000000')
+ def test():
+ raise Exception('i expected a failure - this is a WIP')
+
+ e = self.assertRaises(testcase.TestSkipped, test)
+ self.assertThat(str(e), matchers.Contains('#000000'))
+
+ def test_raises_AssertionError_when_test_passes(self):
+
+ @utils.wip('waiting on bug #000000')
+ def test():
+ pass # literally
+
+ e = self.assertRaises(AssertionError, test)
+ self.assertThat(str(e), matchers.Contains('#000000'))
diff --git a/keystone-moon/keystone/tests/unit/token/__init__.py b/keystone-moon/keystone/tests/unit/token/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
new file mode 100644
index 00000000..23fc0214
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
@@ -0,0 +1,183 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from oslo_utils import timeutils
+
+from keystone.common import config
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import ksfixtures
+from keystone.token import provider
+from keystone.token.providers import fernet
+from keystone.token.providers.fernet import token_formatters
+
+
+CONF = config.CONF
+
+
+class TestFernetTokenProvider(tests.TestCase):
+ def setUp(self):
+ super(TestFernetTokenProvider, self).setUp()
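+        # The Fernet provider needs a key repository; the fixture
+        # provisions a temporary one for this test.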
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+ self.provider = fernet.Provider()
+
+ def test_get_token_id_raises_not_implemented(self):
+ """Test that an exception is raised when calling _get_token_id."""
+ token_data = {}
+ self.assertRaises(exception.NotImplemented,
+ self.provider._get_token_id, token_data)
+
+ def test_invalid_v3_token_raises_401(self):
+ self.assertRaises(
+ exception.Unauthorized,
+ self.provider.validate_v3_token,
+ uuid.uuid4().hex)
+
+ def test_invalid_v2_token_raises_401(self):
+ self.assertRaises(
+ exception.Unauthorized,
+ self.provider.validate_v2_token,
+ uuid.uuid4().hex)
+
+
+class TestPayloads(tests.TestCase):
+ def test_uuid_hex_to_byte_conversions(self):
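+        # A 32-character hex UUID should pack into its 16-byte form and
+        # convert back without loss.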
+ payload_cls = token_formatters.BasePayload
+
+ expected_hex_uuid = uuid.uuid4().hex
+ uuid_obj = uuid.UUID(expected_hex_uuid)
+ expected_uuid_in_bytes = uuid_obj.bytes
+ actual_uuid_in_bytes = payload_cls.convert_uuid_hex_to_bytes(
+ expected_hex_uuid)
+ self.assertEqual(expected_uuid_in_bytes, actual_uuid_in_bytes)
+ actual_hex_uuid = payload_cls.convert_uuid_bytes_to_hex(
+ expected_uuid_in_bytes)
+ self.assertEqual(expected_hex_uuid, actual_hex_uuid)
+
+ def test_time_string_to_int_conversions(self):
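+        # ISO 8601 timestamps are collapsed to seconds since the epoch;
+        # the round trip should reproduce the original string.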
+ payload_cls = token_formatters.BasePayload
+
+ expected_time_str = timeutils.isotime()
+ time_obj = timeutils.parse_isotime(expected_time_str)
+ expected_time_int = (
+ (timeutils.normalize_time(time_obj) -
+ datetime.datetime.utcfromtimestamp(0)).total_seconds())
+
+ actual_time_int = payload_cls._convert_time_string_to_int(
+ expected_time_str)
+ self.assertEqual(expected_time_int, actual_time_int)
+
+ actual_time_str = payload_cls._convert_int_to_time_string(
+ actual_time_int)
+ self.assertEqual(expected_time_str, actual_time_str)
+
+ def test_unscoped_payload(self):
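+        # assemble() and disassemble() should round-trip every payload
+        # field unchanged.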
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.UnscopedPayload.assemble(
+ exp_user_id, exp_methods, exp_expires_at, exp_audit_ids)
+
+ (user_id, methods, expires_at, audit_ids) = (
+ token_formatters.UnscopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_project_scoped_payload(self):
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_project_id = uuid.uuid4().hex
+ exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.ProjectScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids)
+
+ (user_id, methods, project_id, expires_at, audit_ids) = (
+ token_formatters.ProjectScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_domain_scoped_payload(self):
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_domain_id = uuid.uuid4().hex
+ exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.DomainScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
+ exp_audit_ids)
+
+ (user_id, methods, domain_id, expires_at, audit_ids) = (
+ token_formatters.DomainScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_domain_id, domain_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_domain_scoped_payload_with_default_domain(self):
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_domain_id = CONF.identity.default_domain_id
+ exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.DomainScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
+ exp_audit_ids)
+
+ (user_id, methods, domain_id, expires_at, audit_ids) = (
+ token_formatters.DomainScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_domain_id, domain_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_trust_scoped_payload(self):
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_project_id = uuid.uuid4().hex
+ exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_audit_ids = [provider.random_urlsafe_str()]
+ exp_trust_id = uuid.uuid4().hex
+
+ payload = token_formatters.TrustScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids, exp_trust_id)
+
+ (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
+ token_formatters.TrustScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertEqual(exp_trust_id, trust_id)
diff --git a/keystone-moon/keystone/tests/unit/token/test_provider.py b/keystone-moon/keystone/tests/unit/token/test_provider.py
new file mode 100644
index 00000000..e5910690
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_provider.py
@@ -0,0 +1,29 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import urllib
+
+from keystone.tests import unit
+from keystone.token import provider
+
+
+class TestRandomStrings(unit.BaseTestCase):
+ def test_strings_are_url_safe(self):
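+        # quote_plus() leaves URL-safe input untouched, so equality here
+        # proves no character needed escaping.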
+ s = provider.random_urlsafe_str()
+ self.assertEqual(s, urllib.quote_plus(s))
+
+ def test_strings_can_be_converted_to_bytes(self):
+ s = provider.random_urlsafe_str()
+        self.assertIsInstance(s, basestring)
+
+ b = provider.random_urlsafe_str_to_bytes(s)
+        self.assertIsInstance(b, bytes)
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py b/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
new file mode 100644
index 00000000..a12a22d4
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
@@ -0,0 +1,55 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import base64
+import uuid
+
+from testtools import matchers
+
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.token.providers import common
+
+
+class TestTokenDataHelper(tests.TestCase):
+ def setUp(self):
+ super(TestTokenDataHelper, self).setUp()
+ self.load_backends()
+ self.v3_data_helper = common.V3TokenDataHelper()
+
+ def test_v3_token_data_helper_populate_audit_info_string(self):
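+        # Passing a single parent audit ID should keep it and append one
+        # newly generated ID, for two entries in total.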
+ token_data = {}
+ audit_info = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
+ self.v3_data_helper._populate_audit_info(token_data, audit_info)
+ self.assertIn(audit_info, token_data['audit_ids'])
+ self.assertThat(token_data['audit_ids'], matchers.HasLength(2))
+
+ def test_v3_token_data_helper_populate_audit_info_none(self):
+ token_data = {}
+ self.v3_data_helper._populate_audit_info(token_data, audit_info=None)
+ self.assertThat(token_data['audit_ids'], matchers.HasLength(1))
+ self.assertNotIn(None, token_data['audit_ids'])
+
+ def test_v3_token_data_helper_populate_audit_info_list(self):
+ token_data = {}
+ audit_info = [base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2],
+ base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]]
+ self.v3_data_helper._populate_audit_info(token_data, audit_info)
+ self.assertEqual(audit_info, token_data['audit_ids'])
+
+ def test_v3_token_data_helper_populate_audit_info_invalid(self):
+ token_data = {}
+ audit_info = dict()
+ self.assertRaises(exception.UnexpectedError,
+ self.v3_data_helper._populate_audit_info,
+ token_data=token_data,
+ audit_info=audit_info)
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_model.py b/keystone-moon/keystone/tests/unit/token/test_token_model.py
new file mode 100644
index 00000000..b2474289
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_token_model.py
@@ -0,0 +1,262 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from oslo_config import cfg
+from oslo_utils import timeutils
+
+from keystone import exception
+from keystone.models import token_model
+from keystone.tests.unit import core
+from keystone.tests.unit import test_token_provider
+
+
+CONF = cfg.CONF
+
+
+class TestKeystoneTokenModel(core.TestCase):
+ def setUp(self):
+ super(TestKeystoneTokenModel, self).setUp()
+ self.load_backends()
+ self.v2_sample_token = copy.deepcopy(
+ test_token_provider.SAMPLE_V2_TOKEN)
+ self.v3_sample_token = copy.deepcopy(
+ test_token_provider.SAMPLE_V3_TOKEN)
+
+ def test_token_model_v3(self):
+ token_data = token_model.KeystoneToken(uuid.uuid4().hex,
+ self.v3_sample_token)
+ self.assertIs(token_model.V3, token_data.version)
+ expires = timeutils.normalize_time(timeutils.parse_isotime(
+ self.v3_sample_token['token']['expires_at']))
+ issued = timeutils.normalize_time(timeutils.parse_isotime(
+ self.v3_sample_token['token']['issued_at']))
+ self.assertEqual(expires, token_data.expires)
+ self.assertEqual(issued, token_data.issued)
+ self.assertEqual(self.v3_sample_token['token']['user']['id'],
+ token_data.user_id)
+ self.assertEqual(self.v3_sample_token['token']['user']['name'],
+ token_data.user_name)
+ self.assertEqual(self.v3_sample_token['token']['user']['domain']['id'],
+ token_data.user_domain_id)
+ self.assertEqual(
+ self.v3_sample_token['token']['user']['domain']['name'],
+ token_data.user_domain_name)
+ self.assertEqual(
+ self.v3_sample_token['token']['project']['domain']['id'],
+ token_data.project_domain_id)
+ self.assertEqual(
+ self.v3_sample_token['token']['project']['domain']['name'],
+ token_data.project_domain_name)
+ self.assertEqual(self.v3_sample_token['token']['OS-TRUST:trust']['id'],
+ token_data.trust_id)
+ self.assertEqual(
+ self.v3_sample_token['token']['OS-TRUST:trust']['trustor_user_id'],
+ token_data.trustor_user_id)
+ self.assertEqual(
+ self.v3_sample_token['token']['OS-TRUST:trust']['trustee_user_id'],
+ token_data.trustee_user_id)
+ # Project Scoped Token
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'domain_id')
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'domain_name')
+ self.assertFalse(token_data.domain_scoped)
+ self.assertEqual(self.v3_sample_token['token']['project']['id'],
+ token_data.project_id)
+ self.assertEqual(self.v3_sample_token['token']['project']['name'],
+ token_data.project_name)
+ self.assertTrue(token_data.project_scoped)
+ self.assertTrue(token_data.scoped)
+ self.assertTrue(token_data.trust_scoped)
+ self.assertEqual(
+ [r['id'] for r in self.v3_sample_token['token']['roles']],
+ token_data.role_ids)
+ self.assertEqual(
+ [r['name'] for r in self.v3_sample_token['token']['roles']],
+ token_data.role_names)
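+        # Dropping the project reverts the token to unscoped.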
+ token_data.pop('project')
+ self.assertFalse(token_data.project_scoped)
+ self.assertFalse(token_data.scoped)
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'project_id')
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'project_name')
+ self.assertFalse(token_data.project_scoped)
+ domain_id = uuid.uuid4().hex
+ domain_name = uuid.uuid4().hex
+ token_data['domain'] = {'id': domain_id,
+ 'name': domain_name}
+ self.assertEqual(domain_id, token_data.domain_id)
+ self.assertEqual(domain_name, token_data.domain_name)
+ self.assertTrue(token_data.domain_scoped)
+
+ token_data['audit_ids'] = [uuid.uuid4().hex]
+ self.assertEqual(token_data.audit_id,
+ token_data['audit_ids'][0])
+ self.assertEqual(token_data.audit_chain_id,
+ token_data['audit_ids'][0])
+ token_data['audit_ids'].append(uuid.uuid4().hex)
+ self.assertEqual(token_data.audit_chain_id,
+ token_data['audit_ids'][1])
+ del token_data['audit_ids']
+ self.assertIsNone(token_data.audit_id)
+ self.assertIsNone(token_data.audit_chain_id)
+
+ def test_token_model_v3_federated_user(self):
+ token_data = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=self.v3_sample_token)
+ federation_data = {'identity_provider': {'id': uuid.uuid4().hex},
+ 'protocol': {'id': 'saml2'},
+ 'groups': [{'id': uuid.uuid4().hex}
+ for x in range(1, 5)]}
+
+ self.assertFalse(token_data.is_federated_user)
+ self.assertEqual([], token_data.federation_group_ids)
+ self.assertIsNone(token_data.federation_protocol_id)
+ self.assertIsNone(token_data.federation_idp_id)
+
+ token_data['user'][token_model.federation.FEDERATION] = federation_data
+
+ self.assertTrue(token_data.is_federated_user)
+ self.assertEqual([x['id'] for x in federation_data['groups']],
+ token_data.federation_group_ids)
+ self.assertEqual(federation_data['protocol']['id'],
+ token_data.federation_protocol_id)
+ self.assertEqual(federation_data['identity_provider']['id'],
+ token_data.federation_idp_id)
+
+ def test_token_model_v2_federated_user(self):
+ token_data = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=self.v2_sample_token)
+ federation_data = {'identity_provider': {'id': uuid.uuid4().hex},
+ 'protocol': {'id': 'saml2'},
+ 'groups': [{'id': uuid.uuid4().hex}
+ for x in range(1, 5)]}
+ self.assertFalse(token_data.is_federated_user)
+ self.assertEqual([], token_data.federation_group_ids)
+ self.assertIsNone(token_data.federation_protocol_id)
+ self.assertIsNone(token_data.federation_idp_id)
+
+ token_data['user'][token_model.federation.FEDERATION] = federation_data
+
+        # Federated users do not exist in V2, so the data should remain
+        # empty.
+ self.assertFalse(token_data.is_federated_user)
+ self.assertEqual([], token_data.federation_group_ids)
+ self.assertIsNone(token_data.federation_protocol_id)
+ self.assertIsNone(token_data.federation_idp_id)
+
+ def test_token_model_v2(self):
+ token_data = token_model.KeystoneToken(uuid.uuid4().hex,
+ self.v2_sample_token)
+ self.assertIs(token_model.V2, token_data.version)
+ expires = timeutils.normalize_time(timeutils.parse_isotime(
+ self.v2_sample_token['access']['token']['expires']))
+ issued = timeutils.normalize_time(timeutils.parse_isotime(
+ self.v2_sample_token['access']['token']['issued_at']))
+ self.assertEqual(expires, token_data.expires)
+ self.assertEqual(issued, token_data.issued)
+ self.assertEqual(self.v2_sample_token['access']['user']['id'],
+ token_data.user_id)
+ self.assertEqual(self.v2_sample_token['access']['user']['name'],
+ token_data.user_name)
+ self.assertEqual(CONF.identity.default_domain_id,
+ token_data.user_domain_id)
+ self.assertEqual('Default', token_data.user_domain_name)
+ self.assertEqual(CONF.identity.default_domain_id,
+ token_data.project_domain_id)
+ self.assertEqual('Default',
+ token_data.project_domain_name)
+ self.assertEqual(self.v2_sample_token['access']['trust']['id'],
+ token_data.trust_id)
+ self.assertEqual(
+ self.v2_sample_token['access']['trust']['trustor_user_id'],
+ token_data.trustor_user_id)
+ self.assertEqual(
+ self.v2_sample_token['access']['trust']['impersonation'],
+ token_data.trust_impersonation)
+ self.assertEqual(
+ self.v2_sample_token['access']['trust']['trustee_user_id'],
+ token_data.trustee_user_id)
+ # Project Scoped Token
+ self.assertEqual(
+ self.v2_sample_token['access']['token']['tenant']['id'],
+ token_data.project_id)
+ self.assertEqual(
+ self.v2_sample_token['access']['token']['tenant']['name'],
+ token_data.project_name)
+ self.assertTrue(token_data.project_scoped)
+ self.assertTrue(token_data.scoped)
+ self.assertTrue(token_data.trust_scoped)
+ self.assertEqual(
+ [r['name']
+ for r in self.v2_sample_token['access']['user']['roles']],
+ token_data.role_names)
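+        # Removing the tenant leaves the V2 token unscoped.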
+ token_data['token'].pop('tenant')
+ self.assertFalse(token_data.scoped)
+ self.assertFalse(token_data.project_scoped)
+ self.assertFalse(token_data.domain_scoped)
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'project_id')
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'project_name')
+ self.assertRaises(exception.UnexpectedError, getattr, token_data,
+ 'project_domain_id')
+        self.assertRaises(exception.UnexpectedError, getattr, token_data,
+                          'project_domain_name')
+        # There are no domain-scoped tokens in V2.
+ self.assertRaises(NotImplementedError, getattr, token_data,
+ 'domain_id')
+ self.assertRaises(NotImplementedError, getattr, token_data,
+ 'domain_name')
+ token_data['domain'] = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.assertRaises(NotImplementedError, getattr, token_data,
+ 'domain_id')
+ self.assertRaises(NotImplementedError, getattr, token_data,
+ 'domain_name')
+ self.assertFalse(token_data.domain_scoped)
+
+ token_data['token']['audit_ids'] = [uuid.uuid4().hex]
+ self.assertEqual(token_data.audit_chain_id,
+ token_data['token']['audit_ids'][0])
+ token_data['token']['audit_ids'].append(uuid.uuid4().hex)
+ self.assertEqual(token_data.audit_chain_id,
+ token_data['token']['audit_ids'][1])
+ self.assertEqual(token_data.audit_id,
+ token_data['token']['audit_ids'][0])
+ del token_data['token']['audit_ids']
+ self.assertIsNone(token_data.audit_id)
+ self.assertIsNone(token_data.audit_chain_id)
+
+ def test_token_model_unknown(self):
+ self.assertRaises(exception.UnsupportedTokenVersionException,
+ token_model.KeystoneToken,
+ token_id=uuid.uuid4().hex,
+ token_data={'bogus_data': uuid.uuid4().hex})
+
+ def test_token_model_dual_scoped_token(self):
+ domain = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.v2_sample_token['access']['domain'] = domain
+ self.v3_sample_token['token']['domain'] = domain
+
+        # V2 tokens cannot be domain scoped, so this should succeed.
+ token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=self.v2_sample_token)
+
+ self.assertRaises(exception.UnexpectedError,
+ token_model.KeystoneToken,
+ token_id=uuid.uuid4().hex,
+ token_data=self.v3_sample_token)
diff --git a/keystone-moon/keystone/tests/unit/utils.py b/keystone-moon/keystone/tests/unit/utils.py
new file mode 100644
index 00000000..17d1de81
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/utils.py
@@ -0,0 +1,89 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Useful utilities for tests."""
+
+import functools
+import os
+import time
+import uuid
+
+from oslo_log import log
+import six
+from testtools import testcase
+
+
+LOG = log.getLogger(__name__)
+
+TZ = None
+
+
+def timezone(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
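+        # Apply the module-level TZ override for the duration of the
+        # call, then restore the original environment.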
+ tz_original = os.environ.get('TZ')
+ try:
+ if TZ:
+ os.environ['TZ'] = TZ
+ time.tzset()
+ return func(*args, **kwargs)
+ finally:
+ if TZ:
+ if tz_original:
+ os.environ['TZ'] = tz_original
+ else:
+ if 'TZ' in os.environ:
+ del os.environ['TZ']
+ time.tzset()
+ return wrapper
+
+
+def new_uuid():
+ """Return a string UUID."""
+ return uuid.uuid4().hex
+
+
+def wip(message):
+ """Mark a test as work in progress.
+
+ Based on code by Nat Pryce:
+ https://gist.github.com/npryce/997195#file-wip-py
+
+    The test will always be run. If the test fails, a TestSkipped
+    exception is raised. If the test passes, an AssertionError is raised
+    to remind the developer that the decorator can now be removed.
+
+ :param message: a string message to help clarify why the test is
+ marked as a work in progress
+
+    usage:
+      >>> @wip('waiting on bug #000000')
+      ... def test():
+      ...     pass
+
+ """
+
+ def _wip(f):
+ @six.wraps(f)
+ def run_test(*args, **kwargs):
+ try:
+ f(*args, **kwargs)
+ except Exception:
+ raise testcase.TestSkipped('work in progress test failed: ' +
+ message)
+
+ raise AssertionError('work in progress test passed: ' + message)
+
+ return run_test
+
+ return _wip