summaryrefslogtreecommitdiffstats
path: root/keystone-moon/keystone/tests
diff options
context:
space:
mode:
Diffstat (limited to 'keystone-moon/keystone/tests')
-rw-r--r--keystone-moon/keystone/tests/functional/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/functional/shared/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/functional/shared/test_running.py50
-rw-r--r--keystone-moon/keystone/tests/hacking/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/hacking/checks.py434
-rw-r--r--keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py6
-rw-r--r--keystone-moon/keystone/tests/unit/__init__.py8
-rw-r--r--keystone-moon/keystone/tests/unit/auth/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/auth/test_controllers.py98
-rw-r--r--keystone-moon/keystone/tests/unit/backend/core_ldap.py29
-rw-r--r--keystone-moon/keystone/tests/unit/backend/domain_config/core.py28
-rw-r--r--keystone-moon/keystone/tests/unit/catalog/test_core.py18
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_connection_pool.py16
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_injection.py57
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_ldap.py88
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_notifications.py180
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_utils.py2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_sql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf3
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/federation/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py611
-rw-r--r--keystone-moon/keystone/tests/unit/core.py154
-rw-r--r--keystone-moon/keystone/tests/unit/default_fixtures.py4
-rw-r--r--keystone-moon/keystone/tests/unit/fakeldap.py43
-rw-r--r--keystone-moon/keystone/tests/unit/filtering.py31
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_core.py61
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/database.py29
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/hacking.py88
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py6
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py36
-rw-r--r--keystone-moon/keystone/tests/unit/mapping_fixtures.py11
-rw-r--r--keystone-moon/keystone/tests/unit/rest.py7
-rw-r--r--keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml12
-rw-r--r--keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py223
-rw-r--r--keystone-moon/keystone/tests/unit/test_auth.py115
-rw-r--r--keystone-moon/keystone/tests/unit/test_auth_plugin.py52
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend.py533
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py1
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py3
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_federation_sql.py7
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_kvs.py35
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_ldap.py572
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py4
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_rules.py4
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_sql.py150
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_templated.py124
-rw-r--r--keystone-moon/keystone/tests/unit/test_cache.py8
-rw-r--r--keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py9
-rw-r--r--keystone-moon/keystone/tests/unit/test_catalog.py98
-rw-r--r--keystone-moon/keystone/tests/unit/test_cert_setup.py4
-rw-r--r--keystone-moon/keystone/tests/unit/test_cli.py14
-rw-r--r--keystone-moon/keystone/tests/unit/test_config.py6
-rw-r--r--keystone-moon/keystone/tests/unit/test_contrib_ec2.py208
-rw-r--r--keystone-moon/keystone/tests/unit/test_exception.py12
-rw-r--r--keystone-moon/keystone/tests/unit/test_hacking_checks.py16
-rw-r--r--keystone-moon/keystone/tests/unit/test_kvs.py19
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_livetest.py22
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py7
-rw-r--r--keystone-moon/keystone/tests/unit/test_policy.py45
-rw-r--r--keystone-moon/keystone/tests/unit/test_revoke.py21
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py108
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_upgrade.py403
-rw-r--r--keystone-moon/keystone/tests/unit/test_ssl.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_token_provider.py35
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2.py53
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2_controller.py48
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py331
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3.py197
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_assignment.py739
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_auth.py843
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_catalog.py242
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_controller.py1
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_credential.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_federation.py1450
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_filters.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_identity.py58
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_oauth1.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_os_revoke.py13
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_protection.py186
-rw-r--r--keystone-moon/keystone/tests/unit/test_validation.py340
-rw-r--r--keystone-moon/keystone/tests/unit/test_versions.py64
-rw-r--r--keystone-moon/keystone/tests/unit/test_wsgi.py79
-rw-r--r--keystone-moon/keystone/tests/unit/tests/test_core.py17
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_fernet_provider.py370
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_pki_provider.py26
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py26
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_provider.py7
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_token_model.py6
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_uuid_provider.py26
102 files changed, 7311 insertions, 2861 deletions
diff --git a/keystone-moon/keystone/tests/functional/__init__.py b/keystone-moon/keystone/tests/functional/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/functional/__init__.py
diff --git a/keystone-moon/keystone/tests/functional/shared/__init__.py b/keystone-moon/keystone/tests/functional/shared/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/functional/shared/__init__.py
diff --git a/keystone-moon/keystone/tests/functional/shared/test_running.py b/keystone-moon/keystone/tests/functional/shared/test_running.py
new file mode 100644
index 00000000..aed48ac2
--- /dev/null
+++ b/keystone-moon/keystone/tests/functional/shared/test_running.py
@@ -0,0 +1,50 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import requests
+import testtools.matchers
+
+
+is_multiple_choices = testtools.matchers.Equals(
+ requests.status_codes.codes.multiple_choices)
+is_ok = testtools.matchers.Equals(requests.status_codes.codes.ok)
+
+admin_url = 'http://localhost:35357'
+public_url = 'http://localhost:5000'
+versions = ('v2.0', 'v3')
+
+
+class TestServerRunning(testtools.TestCase):
+
+ def test_admin_responds_with_multiple_choices(self):
+ resp = requests.get(admin_url)
+ self.assertThat(resp.status_code, is_multiple_choices)
+
+ def test_admin_versions(self):
+ for version in versions:
+ resp = requests.get(admin_url + '/' + version)
+ self.assertThat(
+ resp.status_code,
+ testtools.matchers.Annotate(
+ 'failed for version %s' % version, is_ok))
+
+ def test_public_responds_with_multiple_choices(self):
+ resp = requests.get(public_url)
+ self.assertThat(resp.status_code, is_multiple_choices)
+
+ def test_public_versions(self):
+ for version in versions:
+ resp = requests.get(public_url + '/' + version)
+ self.assertThat(
+ resp.status_code,
+ testtools.matchers.Annotate(
+ 'failed for version %s' % version, is_ok))
diff --git a/keystone-moon/keystone/tests/hacking/__init__.py b/keystone-moon/keystone/tests/hacking/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/hacking/__init__.py
diff --git a/keystone-moon/keystone/tests/hacking/checks.py b/keystone-moon/keystone/tests/hacking/checks.py
new file mode 100644
index 00000000..17bafff3
--- /dev/null
+++ b/keystone-moon/keystone/tests/hacking/checks.py
@@ -0,0 +1,434 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Keystone's pep8 extensions.
+
+In order to make the review process faster and easier for core devs we are
+adding some Keystone specific pep8 checks. This will catch common errors
+so that core devs don't have to.
+
+There are two types of pep8 extensions. One is a function that takes either
+a physical or logical line. The physical or logical line is the first param
+in the function definition and can be followed by other parameters supported
+by pep8. The second type is a class that parses AST trees. For more info
+please see pep8.py.
+"""
+
+import ast
+import re
+
+import six
+
+
+class BaseASTChecker(ast.NodeVisitor):
+ """Provides a simple framework for writing AST-based checks.
+
+ Subclasses should implement visit_* methods like any other AST visitor
+ implementation. When they detect an error for a particular node the
+ method should call ``self.add_error(offending_node)``. Details about
+ where in the code the error occurred will be pulled from the node
+ object.
+
+ Subclasses should also provide a class variable named CHECK_DESC to
+ be used for the human readable error message.
+
+ """
+
+ def __init__(self, tree, filename):
+ """This object is created automatically by pep8.
+
+ :param tree: an AST tree
+ :param filename: name of the file being analyzed
+ (ignored by our checks)
+ """
+ self._tree = tree
+ self._errors = []
+
+ def run(self):
+ """Called automatically by pep8."""
+ self.visit(self._tree)
+ return self._errors
+
+ def add_error(self, node, message=None):
+ """Add an error caused by a node to the list of errors for pep8."""
+ message = message or self.CHECK_DESC
+ error = (node.lineno, node.col_offset, message, self.__class__)
+ self._errors.append(error)
+
+
+class CheckForMutableDefaultArgs(BaseASTChecker):
+ """Checks for the use of mutable objects as function/method defaults.
+
+ We are only checking for list and dict literals at this time. This means
+ that a developer could specify an instance of their own and cause a bug.
+ The fix for this is probably more work than it's worth because it will
+ get caught during code review.
+
+ """
+
+ CHECK_DESC = 'K001 Using mutable as a function/method default'
+ MUTABLES = (
+ ast.List, ast.ListComp,
+ ast.Dict, ast.DictComp,
+ ast.Set, ast.SetComp,
+ ast.Call)
+
+ def visit_FunctionDef(self, node):
+ for arg in node.args.defaults:
+ if isinstance(arg, self.MUTABLES):
+ self.add_error(arg)
+
+ super(CheckForMutableDefaultArgs, self).generic_visit(node)
+
+
+def block_comments_begin_with_a_space(physical_line, line_number):
+ """There should be a space after the # of block comments.
+
+ There is already a check in pep8 that enforces this rule for
+ inline comments.
+
+ Okay: # this is a comment
+ Okay: #!/usr/bin/python
+ Okay: # this is a comment
+ K002: #this is a comment
+
+ """
+ MESSAGE = "K002 block comments should start with '# '"
+
+ # shebangs are OK
+ if line_number == 1 and physical_line.startswith('#!'):
+ return
+
+ text = physical_line.strip()
+ if text.startswith('#'): # look for block comments
+ if len(text) > 1 and not text[1].isspace():
+ return physical_line.index('#'), MESSAGE
+
+
+class CheckForAssertingNoneEquality(BaseASTChecker):
+ """Ensures that code does not use a None with assert(Not*)Equal."""
+
+ CHECK_DESC_IS = ('K003 Use self.assertIsNone(...) when comparing '
+ 'against None')
+ CHECK_DESC_ISNOT = ('K004 Use assertIsNotNone(...) when comparing '
+ ' against None')
+
+ def visit_Call(self, node):
+ # NOTE(dstanek): I wrote this in a verbose way to make it easier to
+ # read for those that have little experience with Python's AST.
+
+ if isinstance(node.func, ast.Attribute):
+ if node.func.attr == 'assertEqual':
+ for arg in node.args:
+ if isinstance(arg, ast.Name) and arg.id == 'None':
+ self.add_error(node, message=self.CHECK_DESC_IS)
+ elif node.func.attr == 'assertNotEqual':
+ for arg in node.args:
+ if isinstance(arg, ast.Name) and arg.id == 'None':
+ self.add_error(node, message=self.CHECK_DESC_ISNOT)
+
+ super(CheckForAssertingNoneEquality, self).generic_visit(node)
+
+
+class CheckForLoggingIssues(BaseASTChecker):
+
+ DEBUG_CHECK_DESC = 'K005 Using translated string in debug logging'
+ NONDEBUG_CHECK_DESC = 'K006 Not using translating helper for logging'
+ EXCESS_HELPER_CHECK_DESC = 'K007 Using hints when _ is necessary'
+ LOG_MODULES = ('logging', 'oslo_log.log')
+ I18N_MODULES = (
+ 'keystone.i18n._',
+ 'keystone.i18n._LI',
+ 'keystone.i18n._LW',
+ 'keystone.i18n._LE',
+ 'keystone.i18n._LC',
+ )
+ TRANS_HELPER_MAP = {
+ 'debug': None,
+ 'info': '_LI',
+ 'warn': '_LW',
+ 'warning': '_LW',
+ 'error': '_LE',
+ 'exception': '_LE',
+ 'critical': '_LC',
+ }
+
+ def __init__(self, tree, filename):
+ super(CheckForLoggingIssues, self).__init__(tree, filename)
+
+ self.logger_names = []
+ self.logger_module_names = []
+ self.i18n_names = {}
+
+ # NOTE(dstanek): this kinda accounts for scopes when talking
+ # about only leaf node in the graph
+ self.assignments = {}
+
+ def generic_visit(self, node):
+ """Called if no explicit visitor function exists for a node."""
+ for field, value in ast.iter_fields(node):
+ if isinstance(value, list):
+ for item in value:
+ if isinstance(item, ast.AST):
+ item._parent = node
+ self.visit(item)
+ elif isinstance(value, ast.AST):
+ value._parent = node
+ self.visit(value)
+
+ def _filter_imports(self, module_name, alias):
+ """Keeps lists of logging and i18n imports
+
+ """
+ if module_name in self.LOG_MODULES:
+ self.logger_module_names.append(alias.asname or alias.name)
+ elif module_name in self.I18N_MODULES:
+ self.i18n_names[alias.asname or alias.name] = alias.name
+
+ def visit_Import(self, node):
+ for alias in node.names:
+ self._filter_imports(alias.name, alias)
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ def visit_ImportFrom(self, node):
+ for alias in node.names:
+ full_name = '%s.%s' % (node.module, alias.name)
+ self._filter_imports(full_name, alias)
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ def _find_name(self, node):
+ """Return the fully qualified name or a Name or Attribute."""
+ if isinstance(node, ast.Name):
+ return node.id
+ elif (isinstance(node, ast.Attribute)
+ and isinstance(node.value, (ast.Name, ast.Attribute))):
+ method_name = node.attr
+ obj_name = self._find_name(node.value)
+ if obj_name is None:
+ return None
+ return obj_name + '.' + method_name
+ elif isinstance(node, six.string_types):
+ return node
+ else: # could be Subscript, Call or many more
+ return None
+
+ def visit_Assign(self, node):
+ """Look for 'LOG = logging.getLogger'
+
+ This handles the simple case:
+ name = [logging_module].getLogger(...)
+
+ - or -
+
+ name = [i18n_name](...)
+
+        And some much more complex ones:
+ name = [i18n_name](...) % X
+
+ - or -
+
+ self.name = [i18n_name](...) % X
+
+ """
+ attr_node_types = (ast.Name, ast.Attribute)
+
+ if (len(node.targets) != 1
+ or not isinstance(node.targets[0], attr_node_types)):
+ # say no to: "x, y = ..."
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ target_name = self._find_name(node.targets[0])
+
+ if (isinstance(node.value, ast.BinOp) and
+ isinstance(node.value.op, ast.Mod)):
+ if (isinstance(node.value.left, ast.Call) and
+ isinstance(node.value.left.func, ast.Name) and
+ node.value.left.func.id in self.i18n_names):
+ # NOTE(dstanek): this is done to match cases like:
+ # `msg = _('something %s') % x`
+ node = ast.Assign(value=node.value.left)
+
+ if not isinstance(node.value, ast.Call):
+ # node.value must be a call to getLogger
+ self.assignments.pop(target_name, None)
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ # is this a call to an i18n function?
+ if (isinstance(node.value.func, ast.Name)
+ and node.value.func.id in self.i18n_names):
+ self.assignments[target_name] = node.value.func.id
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ if (not isinstance(node.value.func, ast.Attribute)
+ or not isinstance(node.value.func.value, attr_node_types)):
+ # function must be an attribute on an object like
+ # logging.getLogger
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ object_name = self._find_name(node.value.func.value)
+ func_name = node.value.func.attr
+
+ if (object_name in self.logger_module_names
+ and func_name == 'getLogger'):
+ self.logger_names.append(target_name)
+
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ def visit_Call(self, node):
+ """Look for the 'LOG.*' calls.
+
+ """
+
+ # obj.method
+ if isinstance(node.func, ast.Attribute):
+ obj_name = self._find_name(node.func.value)
+ if isinstance(node.func.value, ast.Name):
+ method_name = node.func.attr
+ elif isinstance(node.func.value, ast.Attribute):
+ obj_name = self._find_name(node.func.value)
+ method_name = node.func.attr
+ else: # could be Subscript, Call or many more
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+        # must be a logger instance and one of the supported logging methods
+ if (obj_name not in self.logger_names
+ or method_name not in self.TRANS_HELPER_MAP):
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ # the call must have arguments
+ if not len(node.args):
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ if method_name == 'debug':
+ self._process_debug(node)
+ elif method_name in self.TRANS_HELPER_MAP:
+ self._process_non_debug(node, method_name)
+
+ return super(CheckForLoggingIssues, self).generic_visit(node)
+
+ def _process_debug(self, node):
+ msg = node.args[0] # first arg to a logging method is the msg
+
+ # if first arg is a call to a i18n name
+ if (isinstance(msg, ast.Call)
+ and isinstance(msg.func, ast.Name)
+ and msg.func.id in self.i18n_names):
+ self.add_error(msg, message=self.DEBUG_CHECK_DESC)
+
+ # if the first arg is a reference to a i18n call
+ elif (isinstance(msg, ast.Name)
+ and msg.id in self.assignments
+ and not self._is_raised_later(node, msg.id)):
+ self.add_error(msg, message=self.DEBUG_CHECK_DESC)
+
+ def _process_non_debug(self, node, method_name):
+ msg = node.args[0] # first arg to a logging method is the msg
+
+ # if first arg is a call to a i18n name
+ if isinstance(msg, ast.Call):
+ try:
+ func_name = msg.func.id
+ except AttributeError:
+ # in the case of logging only an exception, the msg function
+ # will not have an id associated with it, for instance:
+ # LOG.warning(six.text_type(e))
+ return
+
+ # the function name is the correct translation helper
+ # for the logging method
+ if func_name == self.TRANS_HELPER_MAP[method_name]:
+ return
+
+ # the function name is an alias for the correct translation
+            # helper for the logging method
+ if (self.i18n_names[func_name] ==
+ self.TRANS_HELPER_MAP[method_name]):
+ return
+
+ self.add_error(msg, message=self.NONDEBUG_CHECK_DESC)
+
+ # if the first arg is not a reference to the correct i18n hint
+ elif isinstance(msg, ast.Name):
+
+ # FIXME(dstanek): to make sure more robust we should be checking
+ # all names passed into a logging method. we can't right now
+ # because:
+ # 1. We have code like this that we'll fix when dealing with the %:
+ # msg = _('....') % {}
+ # LOG.warn(msg)
+ # 2. We also do LOG.exception(e) in several places. I'm not sure
+ # exactly what we should be doing about that.
+ if msg.id not in self.assignments:
+ return
+
+ helper_method_name = self.TRANS_HELPER_MAP[method_name]
+ if (self.assignments[msg.id] != helper_method_name
+ and not self._is_raised_later(node, msg.id)):
+ self.add_error(msg, message=self.NONDEBUG_CHECK_DESC)
+ elif (self.assignments[msg.id] == helper_method_name
+ and self._is_raised_later(node, msg.id)):
+ self.add_error(msg, message=self.EXCESS_HELPER_CHECK_DESC)
+
+ def _is_raised_later(self, node, name):
+
+ def find_peers(node):
+ node_for_line = node._parent
+ for _field, value in ast.iter_fields(node._parent._parent):
+ if isinstance(value, list) and node_for_line in value:
+ return value[value.index(node_for_line) + 1:]
+ continue
+ return []
+
+ peers = find_peers(node)
+ for peer in peers:
+ if isinstance(peer, ast.Raise):
+ if (isinstance(peer.type, ast.Call) and
+ len(peer.type.args) > 0 and
+ isinstance(peer.type.args[0], ast.Name) and
+ name in (a.id for a in peer.type.args)):
+ return True
+ else:
+ return False
+ elif isinstance(peer, ast.Assign):
+ if name in (t.id for t in peer.targets):
+ return False
+
+
+def dict_constructor_with_sequence_copy(logical_line):
+ """Should use a dict comprehension instead of a dict constructor.
+
+ PEP-0274 introduced dict comprehension with performance enhancement
+ and it also makes code more readable.
+
+ Okay: lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
+ Okay: fool = dict(a='a', b='b')
+ K008: lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1]))
+ K008: attrs = dict([(k, _from_json(v))
+ K008: dict([[i,i] for i in range(3)])
+
+ """
+ MESSAGE = ("K008 Must use a dict comprehension instead of a dict"
+ " constructor with a sequence of key-value pairs.")
+
+ dict_constructor_with_sequence_re = (
+ re.compile(r".*\bdict\((\[)?(\(|\[)(?!\{)"))
+
+ if dict_constructor_with_sequence_re.match(logical_line):
+ yield (0, MESSAGE)
+
+
+def factory(register):
+ register(CheckForMutableDefaultArgs)
+ register(block_comments_begin_with_a_space)
+ register(CheckForAssertingNoneEquality)
+ register(CheckForLoggingIssues)
+ register(dict_constructor_with_sequence_copy)
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py
index 0be52c18..83606ff3 100644
--- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py
@@ -14,12 +14,13 @@ from keystone.contrib.moon.exception import *
from keystone.tests.unit import default_fixtures
from keystone.contrib.moon.core import LogManager
from keystone.contrib.moon.core import IntraExtensionAdminManager
+from keystone.contrib.moon.core import IntraExtensionRootManager
from keystone.tests.moon.unit import *
CONF = cfg.CONF
-@dependency.requires('admin_api', 'authz_api', 'tenant_api', 'configuration_api', 'moonlog_api')
+# @dependency.requires('admin_api', 'authz_api', 'tenant_api', 'configuration_api', 'moonlog_api')
class TestConfigurationManager(tests.TestCase):
def setUp(self):
@@ -41,7 +42,8 @@ class TestConfigurationManager(tests.TestCase):
def load_extra_backends(self):
return {
"moonlog_api": LogManager(),
- "admin_api": IntraExtensionAdminManager()
+ "admin_api": IntraExtensionAdminManager(),
+ "root_api": IntraExtensionRootManager()
}
def config_overrides(self):
diff --git a/keystone-moon/keystone/tests/unit/__init__.py b/keystone-moon/keystone/tests/unit/__init__.py
index c97ce253..837afe69 100644
--- a/keystone-moon/keystone/tests/unit/__init__.py
+++ b/keystone-moon/keystone/tests/unit/__init__.py
@@ -25,11 +25,9 @@ if six.PY3:
import sys
from unittest import mock # noqa: our import detection is naive?
- sys.modules['eventlet'] = mock.Mock()
- sys.modules['eventlet.green'] = mock.Mock()
- sys.modules['eventlet.wsgi'] = mock.Mock()
- sys.modules['oslo'].messaging = mock.Mock()
- sys.modules['pycadf'] = mock.Mock()
+ sys.modules['ldappool'] = mock.Mock()
+ sys.modules['memcache'] = mock.Mock()
+ sys.modules['oslo_messaging'] = mock.Mock()
sys.modules['paste'] = mock.Mock()
# NOTE(dstanek): oslo_i18n.enable_lazy() must be called before
diff --git a/keystone-moon/keystone/tests/unit/auth/__init__.py b/keystone-moon/keystone/tests/unit/auth/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/auth/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/auth/test_controllers.py b/keystone-moon/keystone/tests/unit/auth/test_controllers.py
new file mode 100644
index 00000000..76f2776a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/auth/test_controllers.py
@@ -0,0 +1,98 @@
+# Copyright 2015 IBM Corp.
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import mock
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslo_utils import importutils
+from oslotest import mockpatch
+import stevedore
+from stevedore import extension
+
+from keystone.auth import controllers
+from keystone.tests import unit
+
+
+class TestLoadAuthMethod(unit.BaseTestCase):
+ def test_entrypoint_works(self):
+ method = uuid.uuid4().hex
+ plugin_name = self.getUniqueString()
+
+ # Register the method using the given plugin
+ cf = self.useFixture(config_fixture.Config())
+ cf.register_opt(cfg.StrOpt(method), group='auth')
+ cf.config(group='auth', **{method: plugin_name})
+
+ # Setup stevedore.DriverManager to return a driver for the plugin
+ extension_ = extension.Extension(
+ plugin_name, entry_point=mock.sentinel.entry_point,
+ plugin=mock.sentinel.plugin,
+ obj=mock.sentinel.driver)
+ auth_plugin_namespace = 'keystone.auth.%s' % method
+ fake_driver_manager = stevedore.DriverManager.make_test_instance(
+ extension_, namespace=auth_plugin_namespace)
+
+ driver_manager_mock = self.useFixture(mockpatch.PatchObject(
+ stevedore, 'DriverManager', return_value=fake_driver_manager)).mock
+
+ driver = controllers.load_auth_method(method)
+
+ self.assertEqual(auth_plugin_namespace, fake_driver_manager.namespace)
+ driver_manager_mock.assert_called_once_with(
+ auth_plugin_namespace, plugin_name, invoke_on_load=True)
+ self.assertIs(driver, mock.sentinel.driver)
+
+ def test_entrypoint_fails_import_works(self):
+ method = uuid.uuid4().hex
+ plugin_name = self.getUniqueString()
+
+ # Register the method using the given plugin
+ cf = self.useFixture(config_fixture.Config())
+ cf.register_opt(cfg.StrOpt(method), group='auth')
+ cf.config(group='auth', **{method: plugin_name})
+
+ # stevedore.DriverManager raises RuntimeError if it can't load the
+ # driver.
+ self.useFixture(mockpatch.PatchObject(
+ stevedore, 'DriverManager', side_effect=RuntimeError))
+
+ self.useFixture(mockpatch.PatchObject(
+ importutils, 'import_object', return_value=mock.sentinel.driver))
+
+ driver = controllers.load_auth_method(method)
+ self.assertIs(driver, mock.sentinel.driver)
+
+ def test_entrypoint_fails_import_fails(self):
+ method = uuid.uuid4().hex
+ plugin_name = self.getUniqueString()
+
+ # Register the method using the given plugin
+ cf = self.useFixture(config_fixture.Config())
+ cf.register_opt(cfg.StrOpt(method), group='auth')
+ cf.config(group='auth', **{method: plugin_name})
+
+ # stevedore.DriverManager raises RuntimeError if it can't load the
+ # driver.
+ self.useFixture(mockpatch.PatchObject(
+ stevedore, 'DriverManager', side_effect=RuntimeError))
+
+ class TestException(Exception):
+ pass
+
+ self.useFixture(mockpatch.PatchObject(
+ importutils, 'import_object', side_effect=TestException))
+
+ self.assertRaises(TestException, controllers.load_auth_method, method)
diff --git a/keystone-moon/keystone/tests/unit/backend/core_ldap.py b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
index 9d6b23e1..a6cd0802 100644
--- a/keystone-moon/keystone/tests/unit/backend/core_ldap.py
+++ b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
@@ -17,7 +17,6 @@ from oslo_config import cfg
from keystone.common import cache
from keystone.common import ldap as common_ldap
from keystone.common.ldap import core as common_ldap_core
-from keystone.common import sql
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit import fakeldap
@@ -57,19 +56,13 @@ class BaseBackendLdapCommon(object):
for shelf in fakeldap.FakeShelves:
fakeldap.FakeShelves[shelf].clear()
- def reload_backends(self, domain_id):
- # Only one backend unless we are using separate domain backends
- self.load_backends()
-
def get_config(self, domain_id):
# Only one conf structure unless we are using separate domain backends
return CONF
def config_overrides(self):
super(BaseBackendLdapCommon, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(group='identity', driver='ldap')
def config_files(self):
config_files = super(BaseBackendLdapCommon, self).config_files()
@@ -116,17 +109,13 @@ class BaseBackendLdapIdentitySqlEverythingElse(tests.SQLDriverOverrides):
return config_files
def setUp(self):
- self.useFixture(database.Database())
+ sqldb = self.useFixture(database.Database())
super(BaseBackendLdapIdentitySqlEverythingElse, self).setUp()
self.clear_database()
self.load_backends()
cache.configure_cache_region(cache.REGION)
- self.engine = sql.get_engine()
- self.addCleanup(sql.cleanup)
-
- sql.ModelBase.metadata.create_all(bind=self.engine)
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+ sqldb.recreate()
self.load_fixtures(default_fixtures)
# defaulted by the data load
self.user_foo['enabled'] = True
@@ -134,15 +123,9 @@ class BaseBackendLdapIdentitySqlEverythingElse(tests.SQLDriverOverrides):
def config_overrides(self):
super(BaseBackendLdapIdentitySqlEverythingElse,
self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
- self.config_fixture.config(
- group='resource',
- driver='keystone.resource.backends.sql.Resource')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='resource', driver='sql')
+ self.config_fixture.config(group='assignment', driver='sql')
class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object):
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py b/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
index da2e9bd9..c53d99b7 100644
--- a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
+++ b/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
@@ -17,6 +17,7 @@ import mock
from testtools import matchers
from keystone import exception
+from keystone.tests import unit as tests
class DomainConfigTests(object):
@@ -521,3 +522,30 @@ class DomainConfigTests(object):
self.assertFalse(mock_log.warn.called)
# The escaping '%' should have been removed
self.assertEqual('my_url/%(password)s', res['ldap']['url'])
+
+ @tests.skip_if_cache_disabled('domain_config')
+ def test_cache_layer_get_sensitive_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ # cache the result
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(config, res)
+
+ # delete, bypassing domain config manager api
+ self.domain_config_api.delete_config_options(self.domain['id'])
+ self.domain_config_api.delete_config_options(self.domain['id'],
+ sensitive=True)
+
+ self.assertDictEqual(
+ res, self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id']))
+ self.domain_config_api.get_config_with_sensitive_info.invalidate(
+ self.domain_config_api, self.domain['id'])
+ self.assertDictEqual(
+ {},
+ self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id']))
diff --git a/keystone-moon/keystone/tests/unit/catalog/test_core.py b/keystone-moon/keystone/tests/unit/catalog/test_core.py
index 99a34280..2f334bb6 100644
--- a/keystone-moon/keystone/tests/unit/catalog/test_core.py
+++ b/keystone-moon/keystone/tests/unit/catalog/test_core.py
@@ -11,16 +11,16 @@
# under the License.
from oslo_config import cfg
-import testtools
from keystone.catalog import core
from keystone import exception
+from keystone.tests import unit
CONF = cfg.CONF
-class FormatUrlTests(testtools.TestCase):
+class FormatUrlTests(unit.BaseTestCase):
def test_successful_formatting(self):
url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
@@ -72,3 +72,17 @@ class FormatUrlTests(testtools.TestCase):
core.format_url,
url_template,
values)
+
+ def test_substitution_with_allowed_keyerror(self):
+ # No value of 'tenant_id' is passed into url_template.
+ # mod: format_url will return None instead of raising
+ # "MalformedEndpoint" exception.
+ # This is intentional behavior since we don't want to skip
+        # all the later endpoints once there is a URL of an endpoint
+ # trying to replace 'tenant_id' with None.
+ url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
+ '$(tenant_id)s/$(user_id)s')
+ values = {'public_bind_host': 'server', 'admin_port': 9090,
+ 'user_id': 'B'}
+ self.assertIsNone(core.format_url(url_template, values,
+ silent_keyerror_failures=['tenant_id']))
diff --git a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py b/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
index 74d0420c..3813e033 100644
--- a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
+++ b/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
@@ -10,9 +10,11 @@
# License for the specific language governing permissions and limitations
# under the License.
+import threading
import time
import mock
+import six
from six.moves import queue
import testtools
from testtools import matchers
@@ -117,3 +119,17 @@ class TestConnectionPool(core.TestCase):
# after it is available.
connection_pool.put_nowait(conn)
_acquire_connection()
+
+
+class TestMemcacheClientOverrides(core.BaseTestCase):
+
+ def test_client_stripped_of_threading_local(self):
+ """threading.local overrides are restored for _MemcacheClient"""
+ client_class = _memcache_pool._MemcacheClient
+ # get the genuine thread._local from MRO
+ thread_local = client_class.__mro__[2]
+ self.assertTrue(thread_local is threading.local)
+ for field in six.iterkeys(thread_local.__dict__):
+ if field not in ('__dict__', '__weakref__'):
+ self.assertNotEqual(id(getattr(thread_local, field, None)),
+ id(getattr(client_class, field, None)))
diff --git a/keystone-moon/keystone/tests/unit/common/test_injection.py b/keystone-moon/keystone/tests/unit/common/test_injection.py
index 86bb3c24..b4c23a84 100644
--- a/keystone-moon/keystone/tests/unit/common/test_injection.py
+++ b/keystone-moon/keystone/tests/unit/common/test_injection.py
@@ -21,6 +21,7 @@ from keystone.tests import unit as tests
class TestDependencyInjection(tests.BaseTestCase):
def setUp(self):
super(TestDependencyInjection, self).setUp()
+ dependency.reset()
self.addCleanup(dependency.reset)
def test_dependency_injection(self):
@@ -210,62 +211,6 @@ class TestDependencyInjection(tests.BaseTestCase):
self.assertFalse(dependency._REGISTRY)
- def test_optional_dependency_not_provided(self):
- requirement_name = uuid.uuid4().hex
-
- @dependency.optional(requirement_name)
- class C1(object):
- pass
-
- c1_inst = C1()
-
- dependency.resolve_future_dependencies()
-
- self.assertIsNone(getattr(c1_inst, requirement_name))
-
- def test_optional_dependency_provided(self):
- requirement_name = uuid.uuid4().hex
-
- @dependency.optional(requirement_name)
- class C1(object):
- pass
-
- @dependency.provider(requirement_name)
- class P1(object):
- pass
-
- c1_inst = C1()
- p1_inst = P1()
-
- dependency.resolve_future_dependencies()
-
- self.assertIs(getattr(c1_inst, requirement_name), p1_inst)
-
- def test_optional_and_required(self):
- p1_name = uuid.uuid4().hex
- p2_name = uuid.uuid4().hex
- optional_name = uuid.uuid4().hex
-
- @dependency.provider(p1_name)
- @dependency.requires(p2_name)
- @dependency.optional(optional_name)
- class P1(object):
- pass
-
- @dependency.provider(p2_name)
- @dependency.requires(p1_name)
- class P2(object):
- pass
-
- p1 = P1()
- p2 = P2()
-
- dependency.resolve_future_dependencies()
-
- self.assertIs(getattr(p1, p2_name), p2)
- self.assertIs(getattr(p2, p1_name), p1)
- self.assertIsNone(getattr(p1, optional_name))
-
def test_get_provider(self):
# Can get the instance of a provider using get_provider
diff --git a/keystone-moon/keystone/tests/unit/common/test_ldap.py b/keystone-moon/keystone/tests/unit/common/test_ldap.py
index 41568890..d3ce8cd2 100644
--- a/keystone-moon/keystone/tests/unit/common/test_ldap.py
+++ b/keystone-moon/keystone/tests/unit/common/test_ldap.py
@@ -11,23 +11,24 @@
# License for the specific language governing permissions and limitations
# under the License.
+import os
+import tempfile
import uuid
+import fixtures
import ldap.dn
import mock
from oslo_config import cfg
from testtools import matchers
-import os
-import shutil
-import tempfile
-
+from keystone.common import driver_hints
from keystone.common import ldap as ks_ldap
from keystone.common.ldap import core as common_ldap_core
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit import fakeldap
+
CONF = cfg.CONF
@@ -218,9 +219,7 @@ class LDAPDeleteTreeTest(tests.TestCase):
def config_overrides(self):
super(LDAPDeleteTreeTest, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(group='identity', driver='ldap')
def config_files(self):
config_files = super(LDAPDeleteTreeTest, self).config_files()
@@ -311,8 +310,7 @@ class SslTlsTest(tests.TestCase):
def test_certdir_trust_tls(self):
# We need this to actually exist, so we create a tempdir.
- certdir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, certdir)
+ certdir = self.useFixture(fixtures.TempDir()).path
self.config_fixture.config(group='ldap',
url='ldap://localhost',
use_tls=True,
@@ -340,8 +338,7 @@ class SslTlsTest(tests.TestCase):
def test_certdir_trust_ldaps(self):
# We need this to actually exist, so we create a tempdir.
- certdir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, certdir)
+ certdir = self.useFixture(fixtures.TempDir()).path
self.config_fixture.config(group='ldap',
url='ldaps://localhost',
use_tls=False,
@@ -372,9 +369,7 @@ class LDAPPagedResultsTest(tests.TestCase):
def config_overrides(self):
super(LDAPPagedResultsTest, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(group='identity', driver='ldap')
def config_files(self):
config_files = super(LDAPPagedResultsTest, self).config_files()
@@ -500,3 +495,68 @@ class CommonLdapTestCase(tests.BaseTestCase):
py_result = ks_ldap.convert_ldap_result(result)
# The user name should still be a string value.
self.assertEqual(user_name, py_result[0][1]['user_name'][0])
+
+
+class LDAPFilterQueryCompositionTest(tests.TestCase):
+ """These test cases test LDAP filter generation."""
+
+ def setUp(self):
+ super(LDAPFilterQueryCompositionTest, self).setUp()
+
+ self.base_ldap = ks_ldap.BaseLdap(self.config_fixture.conf)
+
+ # The tests need an attribute mapping to use.
+ self.attribute_name = uuid.uuid4().hex
+ self.filter_attribute_name = uuid.uuid4().hex
+ self.base_ldap.attribute_mapping = {
+ self.attribute_name: self.filter_attribute_name
+ }
+
+ def test_return_query_with_no_hints(self):
+ hints = driver_hints.Hints()
+ # NOTE: doesn't have to be a real query, we just need to make sure the
+ # same string is returned if there are no hints.
+ query = uuid.uuid4().hex
+ self.assertEqual(query,
+ self.base_ldap.filter_query(hints=hints, query=query))
+
+ # make sure the default query is an empty string
+ self.assertEqual('', self.base_ldap.filter_query(hints=hints))
+
+ def test_filter_with_empty_query_and_hints_set(self):
+ hints = driver_hints.Hints()
+ username = uuid.uuid4().hex
+ hints.add_filter(name=self.attribute_name,
+ value=username,
+ comparator='equals',
+ case_sensitive=False)
+ expected_ldap_filter = '(&(%s=%s))' % (
+ self.filter_attribute_name, username)
+ self.assertEqual(expected_ldap_filter,
+ self.base_ldap.filter_query(hints=hints))
+
+ def test_filter_with_both_query_and_hints_set(self):
+ hints = driver_hints.Hints()
+ # NOTE: doesn't have to be a real query, we just need to make sure the
+ # filter string is concatenated correctly
+ query = uuid.uuid4().hex
+ username = uuid.uuid4().hex
+ expected_result = '(&%(query)s(%(user_name_attr)s=%(username)s))' % (
+ {'query': query,
+ 'user_name_attr': self.filter_attribute_name,
+ 'username': username})
+ hints.add_filter(self.attribute_name, username)
+ self.assertEqual(expected_result,
+ self.base_ldap.filter_query(hints=hints, query=query))
+
+ def test_filter_with_hints_and_query_is_none(self):
+ hints = driver_hints.Hints()
+ username = uuid.uuid4().hex
+ hints.add_filter(name=self.attribute_name,
+ value=username,
+ comparator='equals',
+ case_sensitive=False)
+ expected_ldap_filter = '(&(%s=%s))' % (
+ self.filter_attribute_name, username)
+ self.assertEqual(expected_ldap_filter,
+ self.base_ldap.filter_query(hints=hints, query=None))
diff --git a/keystone-moon/keystone/tests/unit/common/test_notifications.py b/keystone-moon/keystone/tests/unit/common/test_notifications.py
index 55dd556d..2d872733 100644
--- a/keystone-moon/keystone/tests/unit/common/test_notifications.py
+++ b/keystone-moon/keystone/tests/unit/common/test_notifications.py
@@ -23,10 +23,9 @@ from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import eventfactory
from pycadf import resource as cadfresource
-import testtools
-from keystone.common import dependency
from keystone import notifications
+from keystone.tests import unit
from keystone.tests.unit import test_v3
@@ -53,7 +52,7 @@ def register_callback(operation, resource_type=EXP_RESOURCE_TYPE):
return callback
-class AuditNotificationsTestCase(testtools.TestCase):
+class AuditNotificationsTestCase(unit.BaseTestCase):
def setUp(self):
super(AuditNotificationsTestCase, self).setUp()
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
@@ -96,7 +95,7 @@ class AuditNotificationsTestCase(testtools.TestCase):
DISABLED_OPERATION)
-class NotificationsWrapperTestCase(testtools.TestCase):
+class NotificationsWrapperTestCase(unit.BaseTestCase):
def create_fake_ref(self):
resource_id = uuid.uuid4().hex
return resource_id, {
@@ -174,14 +173,7 @@ class NotificationsWrapperTestCase(testtools.TestCase):
self.assertFalse(callback.called)
-class NotificationsTestCase(testtools.TestCase):
- def setUp(self):
- super(NotificationsTestCase, self).setUp()
-
- # these should use self.config_fixture.config(), but they haven't
- # been registered yet
- CONF.rpc_backend = 'fake'
- CONF.notification_driver = ['fake']
+class NotificationsTestCase(unit.BaseTestCase):
def test_send_notification(self):
"""Test the private method _send_notification to ensure event_type,
@@ -324,7 +316,7 @@ class NotificationsForEntities(BaseNotificationTest):
def test_create_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
self._assert_last_note(
project_ref['id'], CREATED_OPERATION, 'project')
self._assert_last_audit(project_ref['id'], CREATED_OPERATION,
@@ -371,8 +363,8 @@ class NotificationsForEntities(BaseNotificationTest):
def test_delete_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
- self.assignment_api.delete_project(project_ref['id'])
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.delete_project(project_ref['id'])
self._assert_last_note(
project_ref['id'], DELETED_OPERATION, 'project')
self._assert_last_audit(project_ref['id'], DELETED_OPERATION,
@@ -403,19 +395,19 @@ class NotificationsForEntities(BaseNotificationTest):
def test_update_domain(self):
domain_ref = self.new_domain_ref()
- self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['description'] = uuid.uuid4().hex
- self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self.resource_api.update_domain(domain_ref['id'], domain_ref)
self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain')
self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
def test_delete_domain(self):
domain_ref = self.new_domain_ref()
- self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['enabled'] = False
- self.assignment_api.update_domain(domain_ref['id'], domain_ref)
- self.assignment_api.delete_domain(domain_ref['id'])
+ self.resource_api.update_domain(domain_ref['id'], domain_ref)
+ self.resource_api.delete_domain(domain_ref['id'])
self._assert_last_note(domain_ref['id'], DELETED_OPERATION, 'domain')
self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
@@ -542,19 +534,19 @@ class NotificationsForEntities(BaseNotificationTest):
def test_disable_domain(self):
domain_ref = self.new_domain_ref()
- self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['enabled'] = False
- self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self.resource_api.update_domain(domain_ref['id'], domain_ref)
self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain',
public=False)
def test_disable_of_disabled_domain_does_not_notify(self):
domain_ref = self.new_domain_ref()
domain_ref['enabled'] = False
- self.assignment_api.create_domain(domain_ref['id'], domain_ref)
+ self.resource_api.create_domain(domain_ref['id'], domain_ref)
# The domain_ref above is not changed during the create process. We
# can use the same ref to perform the update.
- self.assignment_api.update_domain(domain_ref['id'], domain_ref)
+ self.resource_api.update_domain(domain_ref['id'], domain_ref)
self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain',
public=False)
@@ -568,8 +560,8 @@ class NotificationsForEntities(BaseNotificationTest):
def test_update_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
- self.assignment_api.update_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_sent(
project_ref['id'], UPDATED_OPERATION, 'project', public=True)
self._assert_last_audit(project_ref['id'], UPDATED_OPERATION,
@@ -577,27 +569,27 @@ class NotificationsForEntities(BaseNotificationTest):
def test_disable_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
project_ref['enabled'] = False
- self.assignment_api.update_project(project_ref['id'], project_ref)
+ self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_sent(project_ref['id'], 'disabled', 'project',
public=False)
def test_disable_of_disabled_project_does_not_notify(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
project_ref['enabled'] = False
- self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
# The project_ref above is not changed during the create process. We
# can use the same ref to perform the update.
- self.assignment_api.update_project(project_ref['id'], project_ref)
+ self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project',
public=False)
def test_update_project_does_not_send_disable(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
project_ref['enabled'] = True
- self.assignment_api.update_project(project_ref['id'], project_ref)
+ self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_last_note(
project_ref['id'], UPDATED_OPERATION, 'project')
self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project')
@@ -665,7 +657,7 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
def test_notification_received(self):
callback = register_callback(CREATED_OPERATION, 'project')
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
self.assertTrue(callback.called)
def test_notification_method_not_callable(self):
@@ -694,14 +686,14 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
resource_type,
self._project_deleted_callback)
- def test_provider_event_callbacks_subscription(self):
+ def test_provider_event_callback_subscription(self):
callback_called = []
- @dependency.provider('foo_api')
+ @notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = {
- CREATED_OPERATION: {'project': [self.foo_callback]}}
+ CREATED_OPERATION: {'project': self.foo_callback}}
def foo_callback(self, service, resource_type, operation,
payload):
@@ -710,24 +702,73 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
Foo()
project_ref = self.new_project_ref(domain_id=self.domain_id)
- self.assignment_api.create_project(project_ref['id'], project_ref)
+ self.resource_api.create_project(project_ref['id'], project_ref)
self.assertEqual([True], callback_called)
+ def test_provider_event_callbacks_subscription(self):
+ callback_called = []
+
+ @notifications.listener
+ class Foo(object):
+ def __init__(self):
+ self.event_callbacks = {
+ CREATED_OPERATION: {
+ 'project': [self.callback_0, self.callback_1]}}
+
+ def callback_0(self, service, resource_type, operation, payload):
+ # uses callback_called from the closure
+ callback_called.append('cb0')
+
+ def callback_1(self, service, resource_type, operation, payload):
+ # uses callback_called from the closure
+ callback_called.append('cb1')
+
+ Foo()
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.assertItemsEqual(['cb1', 'cb0'], callback_called)
+
def test_invalid_event_callbacks(self):
- @dependency.provider('foo_api')
+ @notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = 'bogus'
- self.assertRaises(ValueError, Foo)
+ self.assertRaises(AttributeError, Foo)
def test_invalid_event_callbacks_event(self):
- @dependency.provider('foo_api')
+ @notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = {CREATED_OPERATION: 'bogus'}
- self.assertRaises(ValueError, Foo)
+ self.assertRaises(AttributeError, Foo)
+
+ def test_using_an_unbound_method_as_a_callback_fails(self):
+ # NOTE(dstanek): An unbound method is when you reference a method
+ # from a class object. You'll get a method that isn't bound to a
+ # particular instance so there is no magic 'self'. You can call it,
+ # but you have to pass in the instance manually like: C.m(C()).
+ # If you reference the method from an instance then you get a method
+ # that effectively curries the self argument for you
+        # (think functools.partial). Obviously if we don't have an
+ # instance then we can't call the method.
+ @notifications.listener
+ class Foo(object):
+ def __init__(self):
+ self.event_callbacks = {CREATED_OPERATION:
+ {'project': Foo.callback}}
+
+ def callback(self, *args):
+ pass
+
+ # TODO(dstanek): it would probably be nice to fail early using
+ # something like:
+ # self.assertRaises(TypeError, Foo)
+ Foo()
+ project_ref = self.new_project_ref(domain_id=self.domain_id)
+ self.assertRaises(TypeError, self.resource_api.create_project,
+ project_ref['id'], project_ref)
class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
@@ -759,13 +800,14 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
'action': action,
'initiator': initiator,
'event': event,
+ 'event_type': event_type,
'send_notification_called': True}
self._notifications.append(note)
self.useFixture(mockpatch.PatchObject(
notifications, '_send_audit_notification', fake_notify))
- def _assert_last_note(self, action, user_id):
+ def _assert_last_note(self, action, user_id, event_type=None):
self.assertTrue(self._notifications)
note = self._notifications[-1]
self.assertEqual(note['action'], action)
@@ -773,6 +815,8 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
self.assertEqual(initiator.id, user_id)
self.assertEqual(initiator.host.address, self.LOCAL_HOST)
self.assertTrue(note['send_notification_called'])
+ if event_type:
+ self.assertEqual(note['event_type'], event_type)
def _assert_event(self, role_id, project=None, domain=None,
user=None, group=None, inherit=False):
@@ -816,10 +860,10 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
self.assertEqual(project, event.project)
if domain:
self.assertEqual(domain, event.domain)
- if user:
- self.assertEqual(user, event.user)
if group:
self.assertEqual(group, event.group)
+ elif user:
+ self.assertEqual(user, event.user)
self.assertEqual(role_id, event.role)
self.assertEqual(inherit, event.inherited_to_projects)
@@ -857,12 +901,16 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
user=None, group=None):
self.put(url)
action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT)
- self._assert_last_note(action, self.user_id)
+ event_type = '%s.%s.%s' % (notifications.SERVICE,
+ self.ROLE_ASSIGNMENT, CREATED_OPERATION)
+ self._assert_last_note(action, self.user_id, event_type)
self._assert_event(role, project, domain, user, group)
self.delete(url)
action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT)
- self._assert_last_note(action, self.user_id)
- self._assert_event(role, project, domain, user, group)
+ event_type = '%s.%s.%s' % (notifications.SERVICE,
+ self.ROLE_ASSIGNMENT, DELETED_OPERATION)
+ self._assert_last_note(action, self.user_id, event_type)
+ self._assert_event(role, project, domain, user, None)
def test_user_project_grant(self):
url = ('/projects/%s/users/%s/roles/%s' %
@@ -874,14 +922,50 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
def test_group_domain_grant(self):
group_ref = self.new_group_ref(domain_id=self.domain_id)
group = self.identity_api.create_group(group_ref)
+ self.identity_api.add_user_to_group(self.user_id, group['id'])
url = ('/domains/%s/groups/%s/roles/%s' %
(self.domain_id, group['id'], self.role_id))
self._test_role_assignment(url, self.role_id,
domain=self.domain_id,
+ user=self.user_id,
group=group['id'])
+ def test_add_role_to_user_and_project(self):
+ # A notification is sent when add_role_to_user_and_project is called on
+ # the assignment manager.
+
+ project_ref = self.new_project_ref(self.domain_id)
+ project = self.resource_api.create_project(
+ project_ref['id'], project_ref)
+ tenant_id = project['id']
+
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_id, tenant_id, self.role_id)
+
+ self.assertTrue(self._notifications)
+ note = self._notifications[-1]
+ self.assertEqual(note['action'], 'created.role_assignment')
+ self.assertTrue(note['send_notification_called'])
+
+ self._assert_event(self.role_id, project=tenant_id, user=self.user_id)
+
+ def test_remove_role_from_user_and_project(self):
+ # A notification is sent when remove_role_from_user_and_project is
+ # called on the assignment manager.
+
+ self.assignment_api.remove_role_from_user_and_project(
+ self.user_id, self.project_id, self.role_id)
+
+ self.assertTrue(self._notifications)
+ note = self._notifications[-1]
+ self.assertEqual(note['action'], 'deleted.role_assignment')
+ self.assertTrue(note['send_notification_called'])
+
+ self._assert_event(self.role_id, project=self.project_id,
+ user=self.user_id)
+
-class TestCallbackRegistration(testtools.TestCase):
+class TestCallbackRegistration(unit.BaseTestCase):
def setUp(self):
super(TestCallbackRegistration, self).setUp()
self.mock_log = mock.Mock()
diff --git a/keystone-moon/keystone/tests/unit/common/test_utils.py b/keystone-moon/keystone/tests/unit/common/test_utils.py
index 184c8141..e8bac3c0 100644
--- a/keystone-moon/keystone/tests/unit/common/test_utils.py
+++ b/keystone-moon/keystone/tests/unit/common/test_utils.py
@@ -150,7 +150,7 @@ class UtilsTestCase(tests.BaseTestCase):
def test_pki_encoder(self):
data = {'field': 'value'}
json = jsonutils.dumps(data, cls=common_utils.PKIEncoder)
- expected_json = b'{"field":"value"}'
+ expected_json = '{"field":"value"}'
self.assertEqual(expected_json, json)
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
index 8a06f2f9..2097b68b 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
@@ -2,7 +2,7 @@
#For a specific location file based sqlite use:
#connection = sqlite:////tmp/keystone.db
#To Test MySQL:
-#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
#To Test PostgreSQL:
#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
index 2d04d83d..5185770b 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
@@ -3,7 +3,7 @@ connection = sqlite://
#For a file based sqlite use
#connection = sqlite:////tmp/keystone.db
#To Test MySQL:
-#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
#To Test PostgreSQL:
#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
index d612f729..142ca203 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
@@ -1,4 +1,4 @@
#Used for running the Migrate tests against a live Mysql Server
#See _sql_livetest.py
[database]
-connection = mysql://keystone:keystone@localhost/keystone_test?charset=utf8
+connection = mysql+pymysql://keystone:keystone@localhost/keystone_test?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
index 9d401af3..063177bd 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
@@ -2,7 +2,7 @@
#For a specific location file based sqlite use:
#connection = sqlite:////tmp/keystone.db
#To Test MySQL:
-#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
#To Test PostgreSQL:
#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
index a4492a67..fecc7bea 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
@@ -2,4 +2,4 @@
# 'domain1' for use with unit tests.
[identity]
-driver = keystone.identity.backends.sql.Identity \ No newline at end of file
+driver = sql \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
index 7049afed..2dd86c25 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
@@ -11,4 +11,4 @@ password = password
suffix = cn=example,cn=com
[identity]
-driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
+driver = ldap \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
index 6b7e2488..ba22cdf9 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
@@ -8,4 +8,4 @@ password = password
suffix = cn=example,cn=com
[identity]
-driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
+driver = ldap \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
index 0ed68eb9..a14179e3 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
@@ -10,4 +10,4 @@ group_tree_dn = ou=UserGroups,dc=myroot,dc=org
user_tree_dn = ou=Users,dc=myroot,dc=org
[identity]
-driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
+driver = ldap \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
index 81b44462..925b26f2 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
@@ -2,4 +2,4 @@
# 'domain2' for use with unit tests.
[identity]
-driver = keystone.identity.backends.sql.Identity \ No newline at end of file
+driver = sql \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
index 7049afed..2dd86c25 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
@@ -11,4 +11,4 @@ password = password
suffix = cn=example,cn=com
[identity]
-driver = keystone.identity.backends.ldap.Identity \ No newline at end of file
+driver = ldap \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
index a4492a67..fecc7bea 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
@@ -2,4 +2,4 @@
# 'domain1' for use with unit tests.
[identity]
-driver = keystone.identity.backends.sql.Identity \ No newline at end of file
+driver = sql \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf b/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
index abcc43ba..4a9e87d5 100644
--- a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
@@ -1,7 +1,4 @@
[auth]
methods = external,password,token,simple_challenge_response,saml2,openid,x509
simple_challenge_response = keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse
-saml2 = keystone.auth.plugins.mapped.Mapped
-openid = keystone.auth.plugins.mapped.Mapped
-x509 = keystone.auth.plugins.mapped.Mapped
diff --git a/keystone-moon/keystone/tests/unit/contrib/__init__.py b/keystone-moon/keystone/tests/unit/contrib/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/contrib/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py b/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py b/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
new file mode 100644
index 00000000..a8b4ae76
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
@@ -0,0 +1,611 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from keystone.auth.plugins import mapped
+from keystone.contrib.federation import utils as mapping_utils
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import mapping_fixtures
+
+
+class MappingRuleEngineTests(unit.BaseTestCase):
+ """A class for testing the mapping rule engine."""
+
+ def assertValidMappedUserObject(self, mapped_properties,
+ user_type='ephemeral',
+ domain_id=None):
+ """Check whether mapped properties object has 'user' within.
+
+ According to today's rules, RuleProcessor does not have to issue user's
+ id or name. What's actually required is user's type and for ephemeral
+ users that would be service domain named 'Federated'.
+ """
+ self.assertIn('user', mapped_properties,
+ message='Missing user object in mapped properties')
+ user = mapped_properties['user']
+ self.assertIn('type', user)
+ self.assertEqual(user_type, user['type'])
+ self.assertIn('domain', user)
+ domain = user['domain']
+ domain_name_or_id = domain.get('id') or domain.get('name')
+ domain_ref = domain_id or 'Federated'
+ self.assertEqual(domain_ref, domain_name_or_id)
+
+ def test_rule_engine_any_one_of_and_direct_mapping(self):
+ """Should return user's name and group id EMPLOYEE_GROUP_ID.
+
+ The ADMIN_ASSERTION should successfully have a match in MAPPING_LARGE.
+ They will test the case where `any_one_of` is valid, and there is
+ a direct mapping for the users name.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ assertion = mapping_fixtures.ADMIN_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ fn = assertion.get('FirstName')
+ ln = assertion.get('LastName')
+ full_name = '%s %s' % (fn, ln)
+ group_ids = values.get('group_ids')
+ user_name = values.get('user', {}).get('name')
+
+ self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
+ self.assertEqual(full_name, user_name)
+
+ def test_rule_engine_no_regex_match(self):
+ """Should deny authorization, the email of the tester won't match.
+
+ This will not match since the email in the assertion will fail
+ the regex test. It is set to match any @example.com address.
+ But the incoming value is set to eviltester@example.org.
+ RuleProcessor should return list of empty group_ids.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ assertion = mapping_fixtures.BAD_TESTER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+
+ self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNone(mapped_properties['user'].get('name'))
+ self.assertListEqual(list(), mapped_properties['group_ids'])
+
+ def test_rule_engine_regex_many_groups(self):
+ """Should return group TESTER_GROUP_ID.
+
+ The TESTER_ASSERTION should successfully have a match in
+ MAPPING_TESTER_REGEX. This will test the case where many groups
+ are in the assertion, and a regex value is used to try and find
+ a match.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_TESTER_REGEX
+ assertion = mapping_fixtures.TESTER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
+
+ def test_rule_engine_any_one_of_many_rules(self):
+ """Should return group CONTRACTOR_GROUP_ID.
+
+ The CONTRACTOR_ASSERTION should successfully have a match in
+ MAPPING_SMALL. This will test the case where many rules
+ must be matched, including an `any_one_of`, and a direct
+ mapping.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_SMALL
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.CONTRACTOR_GROUP_ID, group_ids)
+
+ def test_rule_engine_not_any_of_and_direct_mapping(self):
+ """Should return user's name and email.
+
+ The CUSTOMER_ASSERTION should successfully have a match in
+ MAPPING_LARGE. This will test the case where a requirement
+ has `not_any_of`, and direct mapping to a username, no group.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ assertion = mapping_fixtures.CUSTOMER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertEqual([], group_ids,)
+
+ def test_rule_engine_not_any_of_many_rules(self):
+ """Should return group EMPLOYEE_GROUP_ID.
+
+ The EMPLOYEE_ASSERTION should successfully have a match in
+ MAPPING_SMALL. This will test the case where many remote
+ rules must be matched, including a `not_any_of`.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_SMALL
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
+
+ def test_rule_engine_not_any_of_regex_verify_pass(self):
+ """Should return group DEVELOPER_GROUP_ID.
+
+ The DEVELOPER_ASSERTION should successfully have a match in
+ MAPPING_DEVELOPER_REGEX. This will test the case where many
+ remote rules must be matched, including a `not_any_of`, with
+ regex set to True.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
+ assertion = mapping_fixtures.DEVELOPER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ self.assertValidMappedUserObject(values)
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
+
+ def test_rule_engine_not_any_of_regex_verify_fail(self):
+ """Should deny authorization.
+
+ The email in the assertion will fail the regex test.
+ It is set to reject any @example.org address, but the
+ incoming value is set to evildeveloper@example.org.
+ RuleProcessor should return list of empty group_ids.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
+ assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+
+ self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNone(mapped_properties['user'].get('name'))
+ self.assertListEqual(list(), mapped_properties['group_ids'])
+
+ def _rule_engine_regex_match_and_many_groups(self, assertion):
+ """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
+
+ A helper function injecting assertion passed as an argument.
+ Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_LARGE
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ values = rp.process(assertion)
+
+ user_name = assertion.get('UserName')
+ group_ids = values.get('group_ids')
+ name = values.get('user', {}).get('name')
+
+ self.assertValidMappedUserObject(values)
+ self.assertEqual(user_name, name)
+ self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
+ self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
+
+ def test_rule_engine_regex_match_and_many_groups(self):
+ """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
+
+ The TESTER_ASSERTION should successfully have a match in
+ MAPPING_LARGE. This will test a successful regex match
+ for an `any_one_of` evaluation type, and will have many
+ groups returned.
+
+ """
+ self._rule_engine_regex_match_and_many_groups(
+ mapping_fixtures.TESTER_ASSERTION)
+
+ def test_rule_engine_discards_nonstring_objects(self):
+ """Check whether RuleProcessor discards non string objects.
+
+ Despite the fact that assertion is malformed and contains
+ non string objects, RuleProcessor should correctly discard them and
+ successfully have a match in MAPPING_LARGE.
+
+ """
+ self._rule_engine_regex_match_and_many_groups(
+ mapping_fixtures.MALFORMED_TESTER_ASSERTION)
+
+ def test_rule_engine_fails_after_discarding_nonstring(self):
+ """Check whether RuleProcessor discards non string objects.
+
+ Expect RuleProcessor to discard non string object, which
+ is required for a correct rule match. RuleProcessor will result with
+ empty list of groups.
+
+ """
+ mapping = mapping_fixtures.MAPPING_SMALL
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNone(mapped_properties['user'].get('name'))
+ self.assertListEqual(list(), mapped_properties['group_ids'])
+
+ def test_rule_engine_returns_group_names(self):
+ """Check whether RuleProcessor returns group names with their domains.
+
+ RuleProcessor should return 'group_names' entry with a list of
+ dictionaries with two entries 'name' and 'domain' identifying group by
+ its name and domain.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUP_NAMES
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ reference = {
+ mapping_fixtures.DEVELOPER_GROUP_NAME:
+ {
+ "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
+ "domain": {
+ "name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME
+ }
+ },
+ mapping_fixtures.TESTER_GROUP_NAME:
+ {
+ "name": mapping_fixtures.TESTER_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+
+ def test_rule_engine_whitelist_and_direct_groups_mapping(self):
+ """Should return user's groups Developer and Contractor.
+
+ The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
+ in MAPPING_GROUPS_WHITELIST. It will test the case where 'whitelist'
+ correctly filters out Manager and only allows Developer and Contractor.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+
+ reference = {
+ mapping_fixtures.DEVELOPER_GROUP_NAME:
+ {
+ "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ },
+ mapping_fixtures.CONTRACTOR_GROUP_NAME:
+ {
+ "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual([], mapped_properties['group_ids'])
+
+ def test_rule_engine_blacklist_and_direct_groups_mapping(self):
+ """Should return user's group Contractor.
+
+ The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
+ in MAPPING_GROUPS_BLACKLIST. It will test the case where 'blacklist'
+ correctly filters out Manager and Developer and only allows Contractor.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+
+ reference = {
+ mapping_fixtures.CONTRACTOR_GROUP_NAME:
+ {
+ "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual([], mapped_properties['group_ids'])
+
+ def test_rule_engine_blacklist_and_direct_groups_mapping_multiples(self):
+ """Tests matching multiple values before the blacklist.
+
+ Verifies that the local indexes are correct when matching multiple
+ remote values for a field when the field occurs before the blacklist
+ entry in the remote rules.
+
+ """
+
+ mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+
+ reference = {
+ mapping_fixtures.CONTRACTOR_GROUP_NAME:
+ {
+ "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
+ "domain": {
+ "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
+ }
+ }
+ }
+ for rule in mapped_properties['group_names']:
+ self.assertDictEqual(reference.get(rule.get('name')), rule)
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual([], mapped_properties['group_ids'])
+
+ def test_rule_engine_whitelist_direct_group_mapping_missing_domain(self):
+ """Test if the local rule is rejected upon missing domain value.
+
+ This is a variation with a ``whitelist`` filter.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ self.assertRaises(exception.ValidationError, rp.process, assertion)
+
+ def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self):
+ """Test if the local rule is rejected upon missing domain value.
+
+ This is a variation with a ``blacklist`` filter.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ self.assertRaises(exception.ValidationError, rp.process, assertion)
+
+ def test_rule_engine_no_groups_allowed(self):
+ """Should return user mapped to no groups.
+
+ The EMPLOYEE_ASSERTION should successfully have a match
+ in MAPPING_GROUPS_WHITELIST, but 'whitelist' should filter out
+ the group values from the assertion and thus map to no groups.
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertListEqual(mapped_properties['group_names'], [])
+ self.assertListEqual(mapped_properties['group_ids'], [])
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+
+ def test_mapping_federated_domain_specified(self):
+ """Test mapping engine when domain 'ephemeral' is explicitly set.
+
+ For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion
+ EMPLOYEE_ASSERTION
+
+ """
+ mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+
+ def test_create_user_object_with_bad_mapping(self):
+ """Test if user object is created even with bad mapping.
+
+ User objects will be created by mapping engine always as long as there
+ is corresponding local rule. This test shows, that even with assertion
+ where no group names nor ids are matched, but there is 'blind' rule for
+ mapping user, such object will be created.
+
+ In this test MAPPING_EPHEMERAL_USER expects UserName set to jsmith
+ whereas value from assertion is 'tbo'.
+
+ """
+ mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+
+ self.assertNotIn('id', mapped_properties['user'])
+ self.assertNotIn('name', mapped_properties['user'])
+
+ def test_set_ephemeral_domain_to_ephemeral_users(self):
+ """Test auto assigning service domain to ephemeral users.
+
+ Test that ephemeral users will always become members of federated
+ service domain. The check depends on ``type`` value which must be set
+ to ``ephemeral`` in case of ephemeral user.
+
+ """
+ mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+
+ def test_local_user_local_domain(self):
+ """Test that local users can have non-service domains assigned."""
+ mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(
+ mapped_properties, user_type='local',
+ domain_id=mapping_fixtures.LOCAL_DOMAIN)
+
+ def test_user_identifications_name(self):
+ """Test various mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has property type set ('ephemeral')
+ - Check if user's name is properly mapped from the assertion
+ - Check if user's id is properly set and equal to name, as it was not
+ explicitly specified in the mapping.
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.CONTRACTOR_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username({}, mapped_properties)
+ self.assertEqual('jsmith', mapped_properties['user']['id'])
+ self.assertEqual('jsmith', mapped_properties['user']['name'])
+
+ def test_user_identifications_name_and_federated_domain(self):
+ """Test various mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has proper type set ('ephemeral')
+ - Check if user's name is properly mapped from the assertion
+ - Check if user's id is properly set and equal to name, as it was not
+ explicitly specified in the mapping.
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.EMPLOYEE_ASSERTION
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username({}, mapped_properties)
+ self.assertEqual('tbo', mapped_properties['user']['name'])
+ self.assertEqual('abc123%40example.com',
+ mapped_properties['user']['id'])
+
+ def test_user_identification_id(self):
+ """Test various mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has proper type set ('ephemeral')
+ - Check if user's id is properly mapped from the assertion
+ - Check if user's name is properly set and equal to id, as it was not
+ explicitly specified in the mapping.
+
+ """
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ assertion = mapping_fixtures.ADMIN_ASSERTION
+ mapped_properties = rp.process(assertion)
+ context = {'environment': {}}
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username(context, mapped_properties)
+ self.assertEqual('bob', mapped_properties['user']['name'])
+ self.assertEqual('bob', mapped_properties['user']['id'])
+
+ def test_user_identification_id_and_name(self):
+ """Test various mapping options and how users are identified.
+
+ This test calls mapped.setup_username() for propagating user object.
+
+ Test plan:
+ - Check if the user has proper domain ('federated') set
+ - Check if the user has proper type set ('ephemeral')
+ - Check if user's name is properly mapped from the assertion
+ - Check if user's id is properly set and equal to value hardcoded
+ in the mapping
+
+ This test does two iterations with different assertions used as input
+ for the Mapping Engine. Different assertions will be matched with
+ different rules in the ruleset, effectively issuing different user_id
+ (hardcoded values). In the first iteration, the hardcoded user_id is
+ not url-safe and we expect Keystone to make it url safe. In the latter
+ iteration, provided user_id is already url-safe and we expect server
+ not to change it.
+
+ """
+ testcases = [(mapping_fixtures.CUSTOMER_ASSERTION, 'bwilliams'),
+ (mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo')]
+ for assertion, exp_user_name in testcases:
+ mapping = mapping_fixtures.MAPPING_USER_IDS
+ rp = mapping_utils.RuleProcessor(mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ context = {'environment': {}}
+ self.assertIsNotNone(mapped_properties)
+ self.assertValidMappedUserObject(mapped_properties)
+ mapped.setup_username(context, mapped_properties)
+ self.assertEqual(exp_user_name, mapped_properties['user']['name'])
+ self.assertEqual('abc123%40example.com',
+ mapped_properties['user']['id'])
diff --git a/keystone-moon/keystone/tests/unit/core.py b/keystone-moon/keystone/tests/unit/core.py
index caca7dbd..e999b641 100644
--- a/keystone-moon/keystone/tests/unit/core.py
+++ b/keystone-moon/keystone/tests/unit/core.py
@@ -45,6 +45,7 @@ from keystone.common import config as common_cfg
from keystone.common import dependency
from keystone.common import kvs
from keystone.common.kvs import core as kvs_core
+from keystone.common import sql
from keystone import config
from keystone import controllers
from keystone import exception
@@ -145,8 +146,9 @@ def remove_generated_paste_config(extension_name):
def skip_if_cache_disabled(*sections):
- """This decorator is used to skip a test if caching is disabled either
- globally or for the specific section.
+ """This decorator is used to skip a test if caching is disabled.
+
+ Caching can be disabled either globally or for a specific section.
In the code fragment::
@@ -163,6 +165,7 @@ def skip_if_cache_disabled(*sections):
If a specified configuration section does not define the `caching` option,
this decorator makes the same assumption as the `should_cache_fn` in
keystone.common.cache that caching should be enabled.
+
"""
def wrapper(f):
@functools.wraps(f)
@@ -180,9 +183,7 @@ def skip_if_cache_disabled(*sections):
def skip_if_no_multiple_domains_support(f):
- """This decorator is used to skip a test if an identity driver
- does not support multiple domains.
- """
+ """Decorator to skip tests for identity drivers limited to one domain."""
@functools.wraps(f)
def wrapper(*args, **kwargs):
test_obj = args[0]
@@ -215,7 +216,7 @@ class TestClient(object):
req = webob.Request.blank(path)
req.method = method
- for k, v in six.iteritems(headers):
+ for k, v in headers.items():
req.headers[k] = v
if body:
req.body = body
@@ -244,6 +245,13 @@ class BaseTestCase(oslotest.BaseTestCase):
super(BaseTestCase, self).setUp()
self.useFixture(mockpatch.PatchObject(sys, 'exit',
side_effect=UnexpectedExit))
+ self.useFixture(mockpatch.PatchObject(logging.Handler, 'handleError',
+ side_effect=BadLog))
+
+ warnings.filterwarnings('error', category=DeprecationWarning,
+ module='^keystone\\.')
+ warnings.simplefilter('error', exc.SAWarning)
+ self.addCleanup(warnings.resetwarnings)
def cleanup_instance(self, *names):
"""Create a function suitable for use with self.addCleanup.
@@ -261,13 +269,17 @@ class BaseTestCase(oslotest.BaseTestCase):
return cleanup
-@dependency.requires('revoke_api')
class TestCase(BaseTestCase):
def config_files(self):
return []
def config_overrides(self):
+ # NOTE(morganfainberg): enforce config_overrides can only ever be
+ # called a single time.
+ assert self.__config_overrides_called is False
+ self.__config_overrides_called = True
+
signing_certfile = 'examples/pki/certs/signing_cert.pem'
signing_keyfile = 'examples/pki/private/signing_key.pem'
self.config_fixture.config(group='oslo_policy',
@@ -281,30 +293,20 @@ class TestCase(BaseTestCase):
proxies=['keystone.tests.unit.test_cache.CacheIsolatingProxy'])
self.config_fixture.config(
group='catalog',
- driver='keystone.catalog.backends.templated.Catalog',
+ driver='templated',
template_file=dirs.tests('default_catalog.templates'))
self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.sql.Identity')
- self.config_fixture.config(
group='kvs',
backends=[
('keystone.tests.unit.test_kvs.'
'KVSBackendForcedKeyMangleFixture'),
'keystone.tests.unit.test_kvs.KVSBackendFixture'])
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='signing', certfile=signing_certfile,
keyfile=signing_keyfile,
ca_certs='examples/pki/certs/cacert.pem')
- self.config_fixture.config(
- group='token',
- driver='keystone.token.persistence.backends.kvs.Token')
- self.config_fixture.config(
- group='trust',
- driver='keystone.trust.backends.sql.Trust')
+ self.config_fixture.config(group='token', driver='kvs')
self.config_fixture.config(
group='saml', certfile=signing_certfile, keyfile=signing_keyfile)
self.config_fixture.config(
@@ -327,28 +329,21 @@ class TestCase(BaseTestCase):
self.auth_plugin_config_override()
def auth_plugin_config_override(self, methods=None, **method_classes):
- if methods is None:
- methods = ['external', 'password', 'token', ]
- if not method_classes:
- method_classes = dict(
- external='keystone.auth.plugins.external.DefaultDomain',
- password='keystone.auth.plugins.password.Password',
- token='keystone.auth.plugins.token.Token',
- )
- self.config_fixture.config(group='auth', methods=methods)
- common_cfg.setup_authentication()
+ if methods is not None:
+ self.config_fixture.config(group='auth', methods=methods)
+ common_cfg.setup_authentication()
if method_classes:
self.config_fixture.config(group='auth', **method_classes)
+ def _assert_config_overrides_called(self):
+ assert self.__config_overrides_called is True
+
def setUp(self):
super(TestCase, self).setUp()
- self.addCleanup(self.cleanup_instance('config_fixture', 'logger'))
-
+ self.__config_overrides_called = False
self.addCleanup(CONF.reset)
-
- self.useFixture(mockpatch.PatchObject(logging.Handler, 'handleError',
- side_effect=BadLog))
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
+ self.addCleanup(delattr, self, 'config_fixture')
self.config(self.config_files())
# NOTE(morganfainberg): mock the auth plugin setup to use the config
@@ -356,13 +351,15 @@ class TestCase(BaseTestCase):
# cleanup.
def mocked_register_auth_plugin_opt(conf, opt):
self.config_fixture.register_opt(opt, group='auth')
- self.register_auth_plugin_opt_patch = self.useFixture(
- mockpatch.PatchObject(common_cfg, '_register_auth_plugin_opt',
- new=mocked_register_auth_plugin_opt))
+ self.useFixture(mockpatch.PatchObject(
+ common_cfg, '_register_auth_plugin_opt',
+ new=mocked_register_auth_plugin_opt))
self.config_overrides()
+ # NOTE(morganfainberg): ensure config_overrides has been called.
+ self.addCleanup(self._assert_config_overrides_called)
- self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+ self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
# NOTE(morganfainberg): This code is a copy from the oslo-incubator
# log module. This is not in a function or otherwise available to use
@@ -374,11 +371,6 @@ class TestCase(BaseTestCase):
logger = logging.getLogger(mod)
logger.setLevel(level_name)
- warnings.filterwarnings('error', category=DeprecationWarning,
- module='^keystone\\.')
- warnings.simplefilter('error', exc.SAWarning)
- self.addCleanup(warnings.resetwarnings)
-
self.useFixture(ksfixtures.Cache())
# Clear the registry of providers so that providers from previous
@@ -397,6 +389,7 @@ class TestCase(BaseTestCase):
self.addCleanup(setattr, controllers, '_VERSIONS', [])
def config(self, config_files):
+ sql.initialize()
CONF(args=[], project='keystone', default_config_files=config_files)
def load_backends(self):
@@ -417,9 +410,9 @@ class TestCase(BaseTestCase):
drivers, _unused = common.setup_backends(
load_extra_backends_fn=self.load_extra_backends)
- for manager_name, manager in six.iteritems(drivers):
+ for manager_name, manager in drivers.items():
setattr(self, manager_name, manager)
- self.addCleanup(self.cleanup_instance(*drivers.keys()))
+ self.addCleanup(self.cleanup_instance(*list(drivers.keys())))
def load_extra_backends(self):
"""Override to load managers that aren't loaded by default.
@@ -541,15 +534,9 @@ class TestCase(BaseTestCase):
def assertNotEmpty(self, l):
self.assertTrue(len(l))
- def assertDictEqual(self, d1, d2, msg=None):
- self.assertIsInstance(d1, dict)
- self.assertIsInstance(d2, dict)
- self.assertEqual(d1, d2, msg)
-
def assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj, *args, **kwargs):
- """Asserts that the message in a raised exception matches a regexp.
- """
+ """Asserts that the message in a raised exception matches a regexp."""
try:
callable_obj(*args, **kwargs)
except expected_exception as exc_value:
@@ -573,43 +560,6 @@ class TestCase(BaseTestCase):
excName = str(expected_exception)
raise self.failureException("%s not raised" % excName)
- def assertDictContainsSubset(self, expected, actual, msg=None):
- """Checks whether actual is a superset of expected."""
-
- def safe_repr(obj, short=False):
- _MAX_LENGTH = 80
- try:
- result = repr(obj)
- except Exception:
- result = object.__repr__(obj)
- if not short or len(result) < _MAX_LENGTH:
- return result
- return result[:_MAX_LENGTH] + ' [truncated]...'
-
- missing = []
- mismatched = []
- for key, value in six.iteritems(expected):
- if key not in actual:
- missing.append(key)
- elif value != actual[key]:
- mismatched.append('%s, expected: %s, actual: %s' %
- (safe_repr(key), safe_repr(value),
- safe_repr(actual[key])))
-
- if not (missing or mismatched):
- return
-
- standardMsg = ''
- if missing:
- standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
- missing)
- if mismatched:
- if standardMsg:
- standardMsg += '; '
- standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
-
- self.fail(self._formatMessage(msg, standardMsg))
-
@property
def ipv6_enabled(self):
if socket.has_ipv6:
@@ -640,21 +590,9 @@ class SQLDriverOverrides(object):
def config_overrides(self):
super(SQLDriverOverrides, self).config_overrides()
# SQL specific driver overrides
- self.config_fixture.config(
- group='catalog',
- driver='keystone.catalog.backends.sql.Catalog')
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.sql.Identity')
- self.config_fixture.config(
- group='policy',
- driver='keystone.policy.backends.sql.Policy')
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.sql.Revoke')
- self.config_fixture.config(
- group='token',
- driver='keystone.token.persistence.backends.sql.Token')
- self.config_fixture.config(
- group='trust',
- driver='keystone.trust.backends.sql.Trust')
+ self.config_fixture.config(group='catalog', driver='sql')
+ self.config_fixture.config(group='identity', driver='sql')
+ self.config_fixture.config(group='policy', driver='sql')
+ self.config_fixture.config(group='revoke', driver='sql')
+ self.config_fixture.config(group='token', driver='sql')
+ self.config_fixture.config(group='trust', driver='sql')
diff --git a/keystone-moon/keystone/tests/unit/default_fixtures.py b/keystone-moon/keystone/tests/unit/default_fixtures.py
index f7e2064f..80b0665f 100644
--- a/keystone-moon/keystone/tests/unit/default_fixtures.py
+++ b/keystone-moon/keystone/tests/unit/default_fixtures.py
@@ -25,6 +25,7 @@ TENANTS = [
'description': 'description',
'enabled': True,
'parent_id': None,
+ 'is_domain': False,
}, {
'id': 'baz',
'name': 'BAZ',
@@ -32,6 +33,7 @@ TENANTS = [
'description': 'description',
'enabled': True,
'parent_id': None,
+ 'is_domain': False,
}, {
'id': 'mtu',
'name': 'MTU',
@@ -39,6 +41,7 @@ TENANTS = [
'enabled': True,
'domain_id': DEFAULT_DOMAIN_ID,
'parent_id': None,
+ 'is_domain': False,
}, {
'id': 'service',
'name': 'service',
@@ -46,6 +49,7 @@ TENANTS = [
'enabled': True,
'domain_id': DEFAULT_DOMAIN_ID,
'parent_id': None,
+ 'is_domain': False,
}
]
diff --git a/keystone-moon/keystone/tests/unit/fakeldap.py b/keystone-moon/keystone/tests/unit/fakeldap.py
index 85aaadfe..2f1ebe57 100644
--- a/keystone-moon/keystone/tests/unit/fakeldap.py
+++ b/keystone-moon/keystone/tests/unit/fakeldap.py
@@ -87,7 +87,7 @@ def _internal_attr(attr_name, value_or_values):
return [attr_fn(value_or_values)]
-def _match_query(query, attrs):
+def _match_query(query, attrs, attrs_checked):
"""Match an ldap query to an attribute dictionary.
The characters &, |, and ! are supported in the query. No syntax checking
@@ -102,12 +102,14 @@ def _match_query(query, attrs):
matchfn = any
# cut off the & or |
groups = _paren_groups(inner[1:])
- return matchfn(_match_query(group, attrs) for group in groups)
+ return matchfn(_match_query(group, attrs, attrs_checked)
+ for group in groups)
if inner.startswith('!'):
# cut off the ! and the nested parentheses
- return not _match_query(query[2:-1], attrs)
+ return not _match_query(query[2:-1], attrs, attrs_checked)
(k, _sep, v) = inner.partition('=')
+ attrs_checked.add(k.lower())
return _match(k, v, attrs)
@@ -210,7 +212,7 @@ FakeShelves = {}
class FakeLdap(core.LDAPHandler):
- '''Emulate the python-ldap API.
+ """Emulate the python-ldap API.
The python-ldap API requires all strings to be UTF-8 encoded. This
is assured by the caller of this interface
@@ -223,7 +225,8 @@ class FakeLdap(core.LDAPHandler):
strings, decodes them to unicode for operations internal to this
emulation, and encodes them back to UTF-8 when returning values
from the emulation.
- '''
+
+ """
__prefix = 'ldap:'
@@ -254,7 +257,7 @@ class FakeLdap(core.LDAPHandler):
ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile)
elif tls_cacertdir:
ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
- if tls_req_cert in core.LDAP_TLS_CERTS.values():
+ if tls_req_cert in list(core.LDAP_TLS_CERTS.values()):
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
else:
raise ValueError("invalid TLS_REQUIRE_CERT tls_req_cert=%s",
@@ -356,7 +359,7 @@ class FakeLdap(core.LDAPHandler):
return self.delete_ext_s(dn, serverctrls=[])
def _getChildren(self, dn):
- return [k for k, v in six.iteritems(self.db)
+ return [k for k, v in self.db.items()
if re.match('%s.*,%s' % (
re.escape(self.__prefix),
re.escape(self.dn(dn))), k)]
@@ -451,6 +454,10 @@ class FakeLdap(core.LDAPHandler):
if server_fail:
raise ldap.SERVER_DOWN
+ if (not filterstr) and (scope != ldap.SCOPE_BASE):
+ raise AssertionError('Search without filter on onelevel or '
+ 'subtree scope')
+
if scope == ldap.SCOPE_BASE:
try:
item_dict = self.db[self.key(base)]
@@ -473,7 +480,7 @@ class FakeLdap(core.LDAPHandler):
raise ldap.NO_SUCH_OBJECT
results = [(base, item_dict)]
extraresults = [(k[len(self.__prefix):], v)
- for k, v in six.iteritems(self.db)
+ for k, v in self.db.items()
if re.match('%s.*,%s' %
(re.escape(self.__prefix),
re.escape(self.dn(base))), k)]
@@ -484,7 +491,7 @@ class FakeLdap(core.LDAPHandler):
base_dn = ldap.dn.str2dn(core.utf8_encode(base))
base_len = len(base_dn)
- for k, v in six.iteritems(self.db):
+ for k, v in self.db.items():
if not k.startswith(self.__prefix):
continue
k_dn_str = k[len(self.__prefix):]
@@ -509,9 +516,15 @@ class FakeLdap(core.LDAPHandler):
id_val = core.utf8_decode(id_val)
match_attrs = attrs.copy()
match_attrs[id_attr] = [id_val]
- if not filterstr or _match_query(filterstr, match_attrs):
+ attrs_checked = set()
+ if not filterstr or _match_query(filterstr, match_attrs,
+ attrs_checked):
+ if (filterstr and
+ (scope != ldap.SCOPE_BASE) and
+ ('objectclass' not in attrs_checked)):
+ raise AssertionError('No objectClass in search filter')
# filter the attributes by attrlist
- attrs = {k: v for k, v in six.iteritems(attrs)
+ attrs = {k: v for k, v in attrs.items()
if not attrlist or k in attrlist}
objects.append((dn, attrs))
@@ -536,11 +549,11 @@ class FakeLdap(core.LDAPHandler):
class FakeLdapPool(FakeLdap):
- '''Emulate the python-ldap API with pooled connections using existing
- FakeLdap logic.
+ """Emulate the python-ldap API with pooled connections.
This class is used as connector class in PooledLDAPHandler.
- '''
+
+ """
def __init__(self, uri, retry_max=None, retry_delay=None, conn=None):
super(FakeLdapPool, self).__init__(conn=conn)
@@ -571,7 +584,7 @@ class FakeLdapPool(FakeLdap):
clientctrls=clientctrls)
def unbind_ext_s(self):
- '''Added to extend FakeLdap as connector class.'''
+ """Added to extend FakeLdap as connector class."""
pass
diff --git a/keystone-moon/keystone/tests/unit/filtering.py b/keystone-moon/keystone/tests/unit/filtering.py
index 1a31a23f..93e0bc28 100644
--- a/keystone-moon/keystone/tests/unit/filtering.py
+++ b/keystone-moon/keystone/tests/unit/filtering.py
@@ -15,6 +15,7 @@
import uuid
from oslo_config import cfg
+from six.moves import range
CONF = cfg.CONF
@@ -41,20 +42,50 @@ class FilterTests(object):
self.assertTrue(found)
def _create_entity(self, entity_type):
+ """Find the create_<entity_type> method.
+
+ Searches through the [identity_api, resource_api, assignment_api]
+ managers for a method called create_<entity_type> and returns the first
+ one.
+
+ """
+
f = getattr(self.identity_api, 'create_%s' % entity_type, None)
if f is None:
+ f = getattr(self.resource_api, 'create_%s' % entity_type, None)
+ if f is None:
f = getattr(self.assignment_api, 'create_%s' % entity_type)
return f
def _delete_entity(self, entity_type):
+ """Find the delete_<entity_type> method.
+
+ Searches through the [identity_api, resource_api, assignment_api]
+ managers for a method called delete_<entity_type> and returns the first
+ one.
+
+ """
+
f = getattr(self.identity_api, 'delete_%s' % entity_type, None)
if f is None:
+ f = getattr(self.resource_api, 'delete_%s' % entity_type, None)
+ if f is None:
f = getattr(self.assignment_api, 'delete_%s' % entity_type)
return f
def _list_entities(self, entity_type):
+ """Find the list_<entity_type> method.
+
+ Searches through the [identity_api, resource_api, assignment_api]
+ managers for a method called list_<entity_type> and returns the first
+ one.
+
+ """
+
f = getattr(self.identity_api, 'list_%ss' % entity_type, None)
if f is None:
+ f = getattr(self.resource_api, 'list_%ss' % entity_type, None)
+ if f is None:
f = getattr(self.assignment_api, 'list_%ss' % entity_type)
return f
diff --git a/keystone-moon/keystone/tests/unit/identity/test_core.py b/keystone-moon/keystone/tests/unit/identity/test_core.py
index 6c8faebb..fa95ec50 100644
--- a/keystone-moon/keystone/tests/unit/identity/test_core.py
+++ b/keystone-moon/keystone/tests/unit/identity/test_core.py
@@ -12,11 +12,13 @@
"""Unit tests for core identity behavior."""
+import itertools
import os
import uuid
import mock
from oslo_config import cfg
+from oslo_config import fixture as config_fixture
from keystone import exception
from keystone import identity
@@ -34,7 +36,10 @@ class TestDomainConfigs(tests.BaseTestCase):
self.addCleanup(CONF.reset)
self.tmp_dir = tests.dirs.tmp()
- CONF.set_override('domain_config_dir', self.tmp_dir, 'identity')
+
+ self.config_fixture = self.useFixture(config_fixture.Config(CONF))
+ self.config_fixture.config(domain_config_dir=self.tmp_dir,
+ group='identity')
def test_config_for_nonexistent_domain(self):
"""Having a config for a non-existent domain will be ignored.
@@ -80,6 +85,45 @@ class TestDomainConfigs(tests.BaseTestCase):
[domain_config_filename],
'abc.def.com')
+ def test_config_for_multiple_sql_backend(self):
+ domains_config = identity.DomainConfigs()
+
+ # Create the right sequence of is_sql in the drivers being
+ # requested to expose the bug, which is that a False setting
+ # means it forgets previous True settings.
+ drivers = []
+ files = []
+ for idx, is_sql in enumerate((True, False, True)):
+ drv = mock.Mock(is_sql=is_sql)
+ drivers.append(drv)
+ name = 'dummy.{0}'.format(idx)
+ files.append(''.join((
+ identity.DOMAIN_CONF_FHEAD,
+ name,
+ identity.DOMAIN_CONF_FTAIL)))
+
+ walk_fake = lambda *a, **kwa: (
+ ('/fake/keystone/domains/config', [], files), )
+
+ generic_driver = mock.Mock(is_sql=False)
+
+ assignment_api = mock.Mock()
+ id_factory = itertools.count()
+ assignment_api.get_domain_by_name.side_effect = (
+ lambda name: {'id': next(id_factory), '_': 'fake_domain'})
+ load_driver_mock = mock.Mock(side_effect=drivers)
+
+ with mock.patch.object(os, 'walk', walk_fake):
+ with mock.patch.object(identity.cfg, 'ConfigOpts'):
+ with mock.patch.object(domains_config, '_load_driver',
+ load_driver_mock):
+ self.assertRaises(
+ exception.MultipleSQLDriversInConfig,
+ domains_config.setup_domain_drivers,
+ generic_driver, assignment_api)
+
+ self.assertEqual(3, load_driver_mock.call_count)
+
class TestDatabaseDomainConfigs(tests.TestCase):
@@ -92,15 +136,16 @@ class TestDatabaseDomainConfigs(tests.TestCase):
self.assertFalse(CONF.identity.domain_configurations_from_database)
def test_loading_config_from_database(self):
- CONF.set_override('domain_configurations_from_database', True,
- 'identity')
+ self.config_fixture.config(domain_configurations_from_database=True,
+ group='identity')
domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.resource_api.create_domain(domain['id'], domain)
# Override two config options for our domain
conf = {'ldap': {'url': uuid.uuid4().hex,
- 'suffix': uuid.uuid4().hex},
+ 'suffix': uuid.uuid4().hex,
+ 'use_tls': 'True'},
'identity': {
- 'driver': 'keystone.identity.backends.ldap.Identity'}}
+ 'driver': 'ldap'}}
self.domain_config_api.create_config(domain['id'], conf)
fake_standard_driver = None
domain_config = identity.DomainConfigs()
@@ -112,6 +157,11 @@ class TestDatabaseDomainConfigs(tests.TestCase):
self.assertEqual(conf['ldap']['suffix'], res.ldap.suffix)
self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope)
+ # Make sure the override is not changing the type of the config value
+ use_tls_type = type(CONF.ldap.use_tls)
+ self.assertEqual(use_tls_type(conf['ldap']['use_tls']),
+ res.ldap.use_tls)
+
# Now turn off using database domain configuration and check that the
# default config file values are now seen instead of the overrides.
CONF.set_override('domain_configurations_from_database', False,
@@ -122,4 +172,5 @@ class TestDatabaseDomainConfigs(tests.TestCase):
res = domain_config.get_domain_conf(domain['id'])
self.assertEqual(CONF.ldap.url, res.ldap.url)
self.assertEqual(CONF.ldap.suffix, res.ldap.suffix)
+ self.assertEqual(CONF.ldap.use_tls, res.ldap.use_tls)
self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/database.py b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
index 15597539..0012df74 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/database.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
@@ -13,15 +13,12 @@
import functools
import os
-import shutil
import fixtures
from oslo_config import cfg
from oslo_db import options as db_options
-from oslo_db.sqlalchemy import migration
from keystone.common import sql
-from keystone.common.sql import migration_helpers
from keystone.tests import unit as tests
@@ -42,23 +39,6 @@ def run_once(f):
return wrapper
-def _setup_database(extensions=None):
- if CONF.database.connection != tests.IN_MEM_DB_CONN_STRING:
- db = tests.dirs.tmp('test.db')
- pristine = tests.dirs.tmp('test.db.pristine')
-
- if os.path.exists(db):
- os.unlink(db)
- if not os.path.exists(pristine):
- migration.db_sync(sql.get_engine(),
- migration_helpers.find_migrate_repo())
- for extension in (extensions or []):
- migration_helpers.sync_database_to_version(extension=extension)
- shutil.copyfile(db, pristine)
- else:
- shutil.copyfile(pristine, db)
-
-
# NOTE(I159): Every execution all the options will be cleared. The method must
# be called at the every fixture initialization.
def initialize_sql_session():
@@ -108,17 +88,18 @@ class Database(fixtures.Fixture):
"""
- def __init__(self, extensions=None):
+ def __init__(self):
super(Database, self).__init__()
- self._extensions = extensions
initialize_sql_session()
_load_sqlalchemy_models()
def setUp(self):
super(Database, self).setUp()
- _setup_database(extensions=self._extensions)
self.engine = sql.get_engine()
- sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.cleanup)
+ sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+
+ def recreate(self):
+ sql.ModelBase.metadata.create_all(bind=self.engine)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
index 47ef6b4b..918087ad 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
@@ -118,8 +118,8 @@ class HackingCode(fixtures.Fixture):
import logging as stlib_logging
from keystone.i18n import _
from keystone.i18n import _ as oslo_i18n
- from keystone.openstack.common import log
- from keystone.openstack.common import log as oslo_logging
+ from oslo_log import log
+ from oslo_log import log as oslo_logging
# stdlib logging
L0 = logging.getLogger()
@@ -138,7 +138,7 @@ class HackingCode(fixtures.Fixture):
)
# oslo logging and specifying a logger
- L2 = log.getLogger(__name__)
+ L2 = logging.getLogger(__name__)
L2.debug(oslo_i18n('text'))
# oslo logging w/ alias
@@ -179,84 +179,6 @@ class HackingCode(fixtures.Fixture):
]
}
- oslo_namespace_imports = {
- 'code': """
- import oslo.utils
- import oslo_utils
- import oslo.utils.encodeutils
- import oslo_utils.encodeutils
- from oslo import utils
- from oslo.utils import encodeutils
- from oslo_utils import encodeutils
-
- import oslo.serialization
- import oslo_serialization
- import oslo.serialization.jsonutils
- import oslo_serialization.jsonutils
- from oslo import serialization
- from oslo.serialization import jsonutils
- from oslo_serialization import jsonutils
-
- import oslo.messaging
- import oslo_messaging
- import oslo.messaging.conffixture
- import oslo_messaging.conffixture
- from oslo import messaging
- from oslo.messaging import conffixture
- from oslo_messaging import conffixture
-
- import oslo.db
- import oslo_db
- import oslo.db.api
- import oslo_db.api
- from oslo import db
- from oslo.db import api
- from oslo_db import api
-
- import oslo.config
- import oslo_config
- import oslo.config.cfg
- import oslo_config.cfg
- from oslo import config
- from oslo.config import cfg
- from oslo_config import cfg
-
- import oslo.i18n
- import oslo_i18n
- import oslo.i18n.log
- import oslo_i18n.log
- from oslo import i18n
- from oslo.i18n import log
- from oslo_i18n import log
- """,
- 'expected_errors': [
- (1, 0, 'K333'),
- (3, 0, 'K333'),
- (5, 0, 'K333'),
- (6, 0, 'K333'),
- (9, 0, 'K333'),
- (11, 0, 'K333'),
- (13, 0, 'K333'),
- (14, 0, 'K333'),
- (17, 0, 'K333'),
- (19, 0, 'K333'),
- (21, 0, 'K333'),
- (22, 0, 'K333'),
- (25, 0, 'K333'),
- (27, 0, 'K333'),
- (29, 0, 'K333'),
- (30, 0, 'K333'),
- (33, 0, 'K333'),
- (35, 0, 'K333'),
- (37, 0, 'K333'),
- (38, 0, 'K333'),
- (41, 0, 'K333'),
- (43, 0, 'K333'),
- (45, 0, 'K333'),
- (46, 0, 'K333'),
- ],
- }
-
dict_constructor = {
'code': """
lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
@@ -285,8 +207,8 @@ class HackingLogging(fixtures.Fixture):
from keystone.i18n import _LE as error_hint
from keystone.i18n import _LI
from keystone.i18n import _LW
- from keystone.openstack.common import log
- from keystone.openstack.common import log as oslo_logging
+ from oslo_log import log
+ from oslo_log import log as oslo_logging
"""
examples = [
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py b/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
index d1ac2ab4..7784bddc 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
@@ -10,9 +10,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-import shutil
-import tempfile
-
import fixtures
from keystone.token.providers.fernet import utils
@@ -25,8 +22,7 @@ class KeyRepository(fixtures.Fixture):
def setUp(self):
super(KeyRepository, self).setUp()
- directory = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, directory)
+ directory = self.useFixture(fixtures.TempDir()).path
self.config_fixture.config(group='fernet_tokens',
key_repository=directory)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py b/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
new file mode 100644
index 00000000..b2cbe067
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
@@ -0,0 +1,36 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fixtures
+
+from keystone.common import ldap as common_ldap
+from keystone.common.ldap import core as common_ldap_core
+from keystone.tests.unit import fakeldap
+
+
+class LDAPDatabase(fixtures.Fixture):
+ """A fixture for setting up and tearing down an LDAP database.
+ """
+
+ def setUp(self):
+ super(LDAPDatabase, self).setUp()
+ self.clear()
+ common_ldap_core._HANDLERS.clear()
+ common_ldap.register_handler('fake://', fakeldap.FakeLdap)
+ # TODO(dstanek): switch the flow here
+ self.addCleanup(self.clear)
+ self.addCleanup(common_ldap_core._HANDLERS.clear)
+
+ def clear(self):
+ for shelf in fakeldap.FakeShelves:
+ fakeldap.FakeShelves[shelf].clear()
diff --git a/keystone-moon/keystone/tests/unit/mapping_fixtures.py b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
index 0892ada5..f86d9245 100644
--- a/keystone-moon/keystone/tests/unit/mapping_fixtures.py
+++ b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
@@ -12,6 +12,9 @@
"""Fixtures for Federation Mapping."""
+from six.moves import range, zip
+
+
EMPLOYEE_GROUP_ID = "0cd5e9"
CONTRACTOR_GROUP_ID = "85a868"
TESTER_GROUP_ID = "123"
@@ -786,6 +789,7 @@ MAPPING_USER_IDS = {
{
"user": {
"name": "{0}",
+ "id": "abc123@example.com",
"domain": {
"id": "federated"
}
@@ -828,7 +832,7 @@ MAPPING_USER_IDS = {
"local": [
{
"user": {
- "id": "abc123",
+ "id": "abc123@example.com",
"name": "{0}",
"domain": {
"id": "federated"
@@ -963,6 +967,7 @@ TESTER_ASSERTION = {
}
ANOTHER_TESTER_ASSERTION = {
+ 'Email': 'testacct@example.com',
'UserName': 'IamTester'
}
@@ -989,8 +994,8 @@ MALFORMED_TESTER_ASSERTION = {
'LastName': 'Account',
'orgPersonType': 'Tester',
'object': object(),
- 'dictionary': dict(zip('teststring', xrange(10))),
- 'tuple': tuple(xrange(5))
+ 'dictionary': dict(zip('teststring', range(10))),
+ 'tuple': tuple(range(5))
}
DEVELOPER_ASSERTION = {
diff --git a/keystone-moon/keystone/tests/unit/rest.py b/keystone-moon/keystone/tests/unit/rest.py
index 16513024..bfa52354 100644
--- a/keystone-moon/keystone/tests/unit/rest.py
+++ b/keystone-moon/keystone/tests/unit/rest.py
@@ -13,7 +13,6 @@
# under the License.
from oslo_serialization import jsonutils
-import six
import webtest
from keystone.auth import controllers as auth_controllers
@@ -61,7 +60,7 @@ class RestfulTestCase(tests.TestCase):
# Will need to reset the plug-ins
self.addCleanup(setattr, auth_controllers, 'AUTH_METHODS', {})
- self.useFixture(database.Database(extensions=self.get_extensions()))
+ self.useFixture(database.Database())
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -75,7 +74,7 @@ class RestfulTestCase(tests.TestCase):
def request(self, app, path, body=None, headers=None, token=None,
expected_status=None, **kwargs):
if headers:
- headers = {str(k): str(v) for k, v in six.iteritems(headers)}
+ headers = {str(k): str(v) for k, v in headers.items()}
else:
headers = {}
@@ -119,7 +118,7 @@ class RestfulTestCase(tests.TestCase):
self.assertEqual(
response.status_code,
expected_status,
- 'Status code %s is not %s, as expected)\n\n%s' %
+ 'Status code %s is not %s, as expected\n\n%s' %
(response.status_code, expected_status, response.body))
def assertValidResponseHeaders(self, response):
diff --git a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml b/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
index 410f9388..414ff9cf 100644
--- a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
+++ b/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
@@ -49,15 +49,21 @@ UHeBXxQq/GmfBv3l+V5ObQ+EHKnyDodLHCk=</ns1:X509Certificate>
</ns0:AuthnContext>
</ns0:AuthnStatement>
<ns0:AttributeStatement>
- <ns0:Attribute FriendlyName="keystone_user" Name="user" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:Attribute Name="openstack_user" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<ns0:AttributeValue xsi:type="xs:string">test_user</ns0:AttributeValue>
</ns0:Attribute>
- <ns0:Attribute FriendlyName="keystone_roles" Name="roles" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:Attribute Name="openstack_user_domain" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:AttributeValue xsi:type="xs:string">user_domain</ns0:AttributeValue>
+ </ns0:Attribute>
+ <ns0:Attribute Name="openstack_roles" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<ns0:AttributeValue xsi:type="xs:string">admin</ns0:AttributeValue>
<ns0:AttributeValue xsi:type="xs:string">member</ns0:AttributeValue>
</ns0:Attribute>
- <ns0:Attribute FriendlyName="keystone_project" Name="project" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:Attribute Name="openstack_project" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<ns0:AttributeValue xsi:type="xs:string">development</ns0:AttributeValue>
</ns0:Attribute>
+ <ns0:Attribute Name="openstack_project_domain" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
+ <ns0:AttributeValue xsi:type="xs:string">project_domain</ns0:AttributeValue>
+ </ns0:Attribute>
</ns0:AttributeStatement>
</ns0:Assertion>
diff --git a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
index e0159b76..9cde704e 100644
--- a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
+++ b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
@@ -17,8 +17,6 @@ import uuid
from testtools import matchers
-# NOTE(morganfainberg): import endpoint filter to populate the SQL model
-from keystone.contrib import endpoint_filter # noqa
from keystone.tests.unit import test_v3
@@ -30,9 +28,7 @@ class TestExtensionCase(test_v3.RestfulTestCase):
def config_overrides(self):
super(TestExtensionCase, self).config_overrides()
self.config_fixture.config(
- group='catalog',
- driver='keystone.contrib.endpoint_filter.backends.catalog_sql.'
- 'EndpointFilterCatalog')
+ group='catalog', driver='endpoint_filter.sql')
def setUp(self):
super(TestExtensionCase, self).setUp()
@@ -52,7 +48,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
"""
self.put(self.default_request_url,
- body='',
expected_status=204)
def test_create_endpoint_project_association_with_invalid_project(self):
@@ -65,7 +60,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': uuid.uuid4().hex,
'endpoint_id': self.endpoint_id},
- body='',
expected_status=404)
def test_create_endpoint_project_association_with_invalid_endpoint(self):
@@ -78,7 +72,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': uuid.uuid4().hex},
- body='',
expected_status=404)
def test_create_endpoint_project_association_with_unexpected_body(self):
@@ -98,7 +91,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
"""
self.put(self.default_request_url,
- body='',
expected_status=204)
self.head('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
@@ -117,7 +109,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': uuid.uuid4().hex,
'endpoint_id': self.endpoint_id},
- body='',
expected_status=404)
def test_check_endpoint_project_association_with_invalid_endpoint(self):
@@ -131,7 +122,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': uuid.uuid4().hex},
- body='',
expected_status=404)
def test_list_endpoints_associated_with_valid_project(self):
@@ -156,7 +146,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
self.put(self.default_request_url)
self.get('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
'project_id': uuid.uuid4().hex},
- body='',
expected_status=404)
def test_list_projects_associated_with_endpoint(self):
@@ -217,7 +206,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': uuid.uuid4().hex,
'endpoint_id': self.endpoint_id},
- body='',
expected_status=404)
def test_remove_endpoint_project_association_with_invalid_endpoint(self):
@@ -231,7 +219,6 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': uuid.uuid4().hex},
- body='',
expected_status=404)
def test_endpoint_project_association_cleanup_when_project_deleted(self):
@@ -289,7 +276,6 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': project['id'],
'endpoint_id': self.endpoint_id},
- body='',
expected_status=204)
# attempt to authenticate without requesting a project
@@ -311,7 +297,6 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
'endpoint_id': self.endpoint_id},
- body='',
expected_status=204)
auth_data = self.build_authentication_request(
@@ -327,65 +312,12 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.assertEqual(r.result['token']['project']['id'],
self.project['id'])
- def test_project_scoped_token_with_no_catalog_using_endpoint_filter(self):
- """Verify endpoint filter when project scoped token returns no catalog.
-
- Test that the project scoped token response is valid for a given
- endpoint-project association when no service catalog is returned.
-
- """
- # create a project to work with
- ref = self.new_project_ref(domain_id=self.domain_id)
- r = self.post('/projects', body={'project': ref})
- project = self.assertValidProjectResponse(r, ref)
-
- # grant the user a role on the project
- self.put(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'user_id': self.user['id'],
- 'project_id': project['id'],
- 'role_id': self.role['id']})
-
- # set the user's preferred project
- body = {'user': {'default_project_id': project['id']}}
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': self.user['id']},
- body=body)
- self.assertValidUserResponse(r)
-
- # add one endpoint to the project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': project['id'],
- 'endpoint_id': self.endpoint_id},
- body='',
- expected_status=204)
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.post('/auth/tokens?nocatalog', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=False,
- endpoint_filter=True,
- ep_filter_assoc=1)
- self.assertEqual(r.result['token']['project']['id'], project['id'])
-
- def test_default_scoped_token_with_no_catalog_using_endpoint_filter(self):
- """Verify endpoint filter when default scoped token returns no catalog.
-
- Test that the default project scoped token response is valid for a
- given endpoint-project association when no service catalog is returned.
-
- """
- # add one endpoint to default project
+ def test_scoped_token_with_no_catalog_using_endpoint_filter(self):
+ """Verify endpoint filter does not affect no catalog."""
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
'endpoint_id': self.endpoint_id},
- body='',
expected_status=204)
auth_data = self.build_authentication_request(
@@ -395,65 +327,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
r = self.post('/auth/tokens?nocatalog', body=auth_data)
self.assertValidProjectScopedTokenResponse(
r,
- require_catalog=False,
- endpoint_filter=True,
- ep_filter_assoc=1)
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
-
- def test_project_scoped_token_with_no_endpoint_project_association(self):
- """Verify endpoint filter when no endpoint-project association.
-
- Test that the project scoped token response is valid when there are
- no endpoint-project associations defined.
-
- """
- # create a project to work with
- ref = self.new_project_ref(domain_id=self.domain_id)
- r = self.post('/projects', body={'project': ref})
- project = self.assertValidProjectResponse(r, ref)
-
- # grant the user a role on the project
- self.put(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'user_id': self.user['id'],
- 'project_id': project['id'],
- 'role_id': self.role['id']})
-
- # set the user's preferred project
- body = {'user': {'default_project_id': project['id']}}
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': self.user['id']},
- body=body)
- self.assertValidUserResponse(r)
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.post('/auth/tokens?nocatalog', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=False,
- endpoint_filter=True)
- self.assertEqual(r.result['token']['project']['id'], project['id'])
-
- def test_default_scoped_token_with_no_endpoint_project_association(self):
- """Verify endpoint filter when no endpoint-project association.
-
- Test that the default project scoped token response is valid when
- there are no endpoint-project associations defined.
-
- """
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens?nocatalog', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=False,
- endpoint_filter=True,)
+ require_catalog=False)
self.assertEqual(r.result['token']['project']['id'],
self.project['id'])
@@ -464,7 +338,6 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
'endpoint_id': self.endpoint_id},
- body='',
expected_status=204)
# create a second temporary endpoint
@@ -480,7 +353,6 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
'endpoint_id': self.endpoint_id2},
- body='',
expected_status=204)
# remove the temporary reference
@@ -576,6 +448,30 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
endpoint_filter=True,
ep_filter_assoc=2)
+ def test_get_auth_catalog_using_endpoint_filter(self):
+ # add one endpoint to default project
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.project['id'],
+ 'endpoint_id': self.endpoint_id},
+ expected_status=204)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ token_data = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ token_data,
+ require_catalog=True,
+ endpoint_filter=True,
+ ep_filter_assoc=1)
+
+ auth_catalog = self.get('/auth/catalog',
+ token=token_data.headers['X-Subject-Token'])
+ self.assertEqual(token_data.result['token']['catalog'],
+ auth_catalog.result['catalog'])
+
class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
JSON_HOME_DATA = {
@@ -635,6 +531,16 @@ class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
},
},
+ 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
+ '1.0/rel/project_endpoint_groups': {
+ 'href-template': '/OS-EP-FILTER/projects/{project_id}/'
+ 'endpoint_groups',
+ 'href-vars': {
+ 'project_id':
+ 'http://docs.openstack.org/api/openstack-identity/3/param/'
+ 'project_id',
+ },
+ },
}
@@ -883,6 +789,40 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
endpoint_group_id, project_id)
self.get(url, expected_status=404)
+ def test_list_endpoint_groups_in_project(self):
+ """GET /OS-EP-FILTER/projects/{project_id}/endpoint_groups."""
+ # create an endpoint group to work with
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # associate endpoint group with project
+ url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.project_id)
+ self.put(url)
+
+ url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
+ {'project_id': self.project_id})
+ response = self.get(url)
+
+ self.assertEqual(
+ endpoint_group_id,
+ response.result['endpoint_groups'][0]['id'])
+
+ def test_list_endpoint_groups_in_invalid_project(self):
+ """Test retrieving from invalid project."""
+ project_id = uuid.uuid4().hex
+ url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
+ {'project_id': project_id})
+ self.get(url, expected_status=404)
+
+ def test_empty_endpoint_groups_in_project(self):
+ """Test when no endpoint groups associated with the project."""
+ url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
+ {'project_id': self.project_id})
+ response = self.get(url)
+
+ self.assertEqual(0, len(response.result['endpoint_groups']))
+
def test_check_endpoint_group_to_project(self):
"""Test HEAD with a valid endpoint group and project association."""
endpoint_group_id = self._create_valid_endpoint_group(
@@ -1088,6 +1028,25 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
self.delete(url)
self.get(url, expected_status=404)
+ def test_remove_endpoint_group_with_project_association(self):
+ # create an endpoint group
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # create an endpoint_group project
+ project_endpoint_group_url = self._get_project_endpoint_group_url(
+ endpoint_group_id, self.default_domain_project_id)
+ self.put(project_endpoint_group_url)
+
+ # remove endpoint group, the associated endpoint_group project will
+ # be removed as well.
+ endpoint_group_url = ('/OS-EP-FILTER/endpoint_groups/'
+ '%(endpoint_group_id)s'
+ % {'endpoint_group_id': endpoint_group_id})
+ self.delete(endpoint_group_url)
+ self.get(endpoint_group_url, expected_status=404)
+ self.get(project_endpoint_group_url, expected_status=404)
+
def _create_valid_endpoint_group(self, url, body):
r = self.post(url, body=body)
return r.result['endpoint_group']['id']
diff --git a/keystone-moon/keystone/tests/unit/test_auth.py b/keystone-moon/keystone/tests/unit/test_auth.py
index 295e028d..f253b02d 100644
--- a/keystone-moon/keystone/tests/unit/test_auth.py
+++ b/keystone-moon/keystone/tests/unit/test_auth.py
@@ -18,7 +18,9 @@ import uuid
import mock
from oslo_config import cfg
+import oslo_utils.fixture
from oslo_utils import timeutils
+import six
from testtools import matchers
from keystone import assignment
@@ -74,6 +76,7 @@ class AuthTest(tests.TestCase):
def setUp(self):
self.useFixture(database.Database())
super(AuthTest, self).setUp()
+ self.time_fixture = self.useFixture(oslo_utils.fixture.TimeFixture())
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -265,12 +268,12 @@ class AuthWithToken(AuthTest):
self.user_foo['id'],
self.tenant_bar['id'],
self.role_member['id'])
- # Get an unscoped tenant
+ # Get an unscoped token
body_dict = _build_user_auth(
username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
- # Get a token on BAR tenant using the unscoped tenant
+ # Get a token on BAR tenant using the unscoped token
body_dict = _build_user_auth(
token=unscoped_token["access"]["token"],
tenant_name="BAR")
@@ -281,6 +284,50 @@ class AuthWithToken(AuthTest):
self.assertEqual(self.tenant_bar['id'], tenant["id"])
self.assertThat(roles, matchers.Contains(self.role_member['id']))
+ def test_auth_scoped_token_bad_project_with_debug(self):
+ """Authenticating with an invalid project fails."""
+ # Bug 1379952 reports poor user feedback, even in debug mode,
+ # when the user accidentally passes a project name as an ID.
+ # This test intentionally does exactly that.
+ body_dict = _build_user_auth(
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_id=self.tenant_bar['name'])
+
+ # with debug enabled, this produces a friendly exception.
+ self.config_fixture.config(debug=True)
+ e = self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+ # explicitly verify that the error message shows that a *name* is
+ # found where an *ID* is expected
+ self.assertIn(
+ 'Project ID not found: %s' % self.tenant_bar['name'],
+ six.text_type(e))
+
+ def test_auth_scoped_token_bad_project_without_debug(self):
+ """Authenticating with an invalid project fails."""
+ # Bug 1379952 reports poor user feedback, even in debug mode,
+ # when the user accidentally passes a project name as an ID.
+ # This test intentionally does exactly that.
+ body_dict = _build_user_auth(
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_id=self.tenant_bar['name'])
+
+ # with debug disabled, authentication failure details are suppressed.
+ self.config_fixture.config(debug=False)
+ e = self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+ # explicitly verify that the error message details above have been
+ # suppressed.
+ self.assertNotIn(
+ 'Project ID not found: %s' % self.tenant_bar['name'],
+ six.text_type(e))
+
def test_auth_token_project_group_role(self):
"""Verify getting a token in a tenant with group roles."""
# Add a v2 style role in so we can check we get this back
@@ -448,10 +495,13 @@ class AuthWithToken(AuthTest):
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
+
# get a second token
body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
self.token_provider_api.revoke_token(token_id, revoke_chain=True)
@@ -470,10 +520,13 @@ class AuthWithToken(AuthTest):
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
+
# get a second token
body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
self.token_provider_api.revoke_token(token_2_id, revoke_chain=True)
@@ -500,13 +553,17 @@ class AuthWithToken(AuthTest):
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
+
# get a second token
body_dict = _build_user_auth(
token=unscoped_token['access']['token'])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
self.token_provider_api.revoke_token(token_id, revoke_chain=True)
+ self.time_fixture.advance_time_seconds(1)
revoke_events = self.revoke_api.list_events()
self.assertThat(revoke_events, matchers.HasLength(1))
@@ -526,15 +583,18 @@ class AuthWithToken(AuthTest):
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
# get a second token
body_dict = _build_user_auth(
token=unscoped_token['access']['token'])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
+ self.time_fixture.advance_time_seconds(1)
# Revoke by audit_id, no audit_info means both parent and child
# token are revoked.
self.token_provider_api.revoke_token(token_id)
+ self.time_fixture.advance_time_seconds(1)
revoke_events = self.revoke_api.list_events()
self.assertThat(revoke_events, matchers.HasLength(2))
@@ -819,9 +879,8 @@ class AuthWithTrust(AuthTest):
context, trust=self.sample_data)
def test_create_trust(self):
- expires_at = timeutils.strtime(timeutils.utcnow() +
- datetime.timedelta(minutes=10),
- fmt=TIME_FORMAT)
+ expires_at = (timeutils.utcnow() +
+ datetime.timedelta(minutes=10)).strftime(TIME_FORMAT)
new_trust = self.create_trust(self.sample_data, self.trustor['name'],
expires_at=expires_at)
self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
@@ -848,6 +907,12 @@ class AuthWithTrust(AuthTest):
self.create_trust, self.sample_data,
self.trustor['name'], expires_at="Z")
+ def test_create_trust_expires_older_than_now(self):
+ self.assertRaises(exception.ValidationExpirationError,
+ self.create_trust, self.sample_data,
+ self.trustor['name'],
+ expires_at="2010-06-04T08:44:31.999999Z")
+
def test_create_trust_without_project_id(self):
"""Verify that trust can be created without project id and
token can be generated with that trust.
@@ -868,8 +933,8 @@ class AuthWithTrust(AuthTest):
def test_get_trust(self):
unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = {'token_id': unscoped_token['access']['token']['id'],
- 'host_url': HOST_URL}
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
new_trust = self.trust_controller.create_trust(
context, trust=self.sample_data)['trust']
trust = self.trust_controller.get_trust(context,
@@ -880,6 +945,21 @@ class AuthWithTrust(AuthTest):
for role in new_trust['roles']:
self.assertIn(role['id'], role_ids)
+ def test_get_trust_without_auth_context(self):
+ """Verify that a trust cannot be retrieved when the auth context is
+ missing.
+ """
+ unscoped_token = self.get_unscoped_token(self.trustor['name'])
+ context = self._create_auth_context(
+ unscoped_token['access']['token']['id'])
+ new_trust = self.trust_controller.create_trust(
+ context, trust=self.sample_data)['trust']
+ # Delete the auth context before calling get_trust().
+ del context['environment'][authorization.AUTH_CONTEXT_ENV]
+ self.assertRaises(exception.Forbidden,
+ self.trust_controller.get_trust, context,
+ new_trust['id'])
+
def test_create_trust_no_impersonation(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'],
expires_at=None, impersonation=False)
@@ -1051,13 +1131,18 @@ class AuthWithTrust(AuthTest):
self.controller.authenticate, {}, request_body)
def test_expired_trust_get_token_fails(self):
- expiry = "1999-02-18T10:10:00Z"
+ expires_at = (timeutils.utcnow() +
+ datetime.timedelta(minutes=5)).strftime(TIME_FORMAT)
+ time_expired = timeutils.utcnow() + datetime.timedelta(minutes=10)
new_trust = self.create_trust(self.sample_data, self.trustor['name'],
- expiry)
- request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
- self.assertRaises(
- exception.Forbidden,
- self.controller.authenticate, {}, request_body)
+ expires_at)
+ with mock.patch.object(timeutils, 'utcnow') as mock_now:
+ mock_now.return_value = time_expired
+ request_body = self.build_v2_token_request('TWO', 'two2',
+ new_trust)
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
def test_token_from_trust_with_wrong_role_fails(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
@@ -1196,9 +1281,7 @@ class TokenExpirationTest(AuthTest):
self.assertEqual(original_expiration, r['access']['token']['expires'])
def test_maintain_uuid_token_expiration(self):
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.uuid.Provider')
+ self.config_fixture.config(group='token', provider='uuid')
self._maintain_token_expiration()
diff --git a/keystone-moon/keystone/tests/unit/test_auth_plugin.py b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
index 11df95a5..a259cc2a 100644
--- a/keystone-moon/keystone/tests/unit/test_auth_plugin.py
+++ b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
@@ -28,9 +28,6 @@ DEMO_USER_ID = uuid.uuid4().hex
class SimpleChallengeResponse(auth.AuthMethodHandler):
-
- method = METHOD_NAME
-
def authenticate(self, context, auth_payload, user_context):
if 'response' in auth_payload:
if auth_payload['response'] != EXPECTED_RESPONSE:
@@ -40,20 +37,6 @@ class SimpleChallengeResponse(auth.AuthMethodHandler):
return {"challenge": "What's the name of your high school?"}
-class DuplicateAuthPlugin(SimpleChallengeResponse):
- """Duplicate simple challenge response auth plugin."""
-
-
-class MismatchedAuthPlugin(SimpleChallengeResponse):
- method = uuid.uuid4().hex
-
-
-class NoMethodAuthPlugin(auth.AuthMethodHandler):
- """An auth plugin that does not supply a method attribute."""
- def authenticate(self, context, auth_payload, auth_context):
- pass
-
-
class TestAuthPlugin(tests.SQLDriverOverrides, tests.TestCase):
def setUp(self):
super(TestAuthPlugin, self).setUp()
@@ -64,9 +47,6 @@ class TestAuthPlugin(tests.SQLDriverOverrides, tests.TestCase):
def config_overrides(self):
super(TestAuthPlugin, self).config_overrides()
method_opts = {
- 'external': 'keystone.auth.plugins.external.DefaultDomain',
- 'password': 'keystone.auth.plugins.password.Password',
- 'token': 'keystone.auth.plugins.token.Token',
METHOD_NAME:
'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse',
}
@@ -123,6 +103,14 @@ class TestAuthPlugin(tests.SQLDriverOverrides, tests.TestCase):
auth_info,
auth_context)
+ def test_duplicate_method(self):
+ # Having the same method twice doesn't cause load_auth_methods to fail.
+ self.auth_plugin_config_override(
+ methods=['external', 'external'])
+ self.clear_auth_plugin_registry()
+ auth.controllers.load_auth_methods()
+ self.assertIn('external', auth.controllers.AUTH_METHODS)
+
class TestAuthPluginDynamicOptions(TestAuthPlugin):
def config_overrides(self):
@@ -137,25 +125,6 @@ class TestAuthPluginDynamicOptions(TestAuthPlugin):
return config_files
-class TestInvalidAuthMethodRegistration(tests.TestCase):
- def test_duplicate_auth_method_registration(self):
- self.config_fixture.config(
- group='auth',
- methods=[
- 'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse',
- 'keystone.tests.unit.test_auth_plugin.DuplicateAuthPlugin'])
- self.clear_auth_plugin_registry()
- self.assertRaises(ValueError, auth.controllers.load_auth_methods)
-
- def test_no_method_attribute_auth_method_by_class_name_registration(self):
- self.config_fixture.config(
- group='auth',
- methods=['keystone.tests.unit.test_auth_plugin.NoMethodAuthPlugin']
- )
- self.clear_auth_plugin_registry()
- self.assertRaises(ValueError, auth.controllers.load_auth_methods)
-
-
class TestMapped(tests.TestCase):
def setUp(self):
super(TestMapped, self).setUp()
@@ -168,8 +137,9 @@ class TestMapped(tests.TestCase):
config_files.append(tests.dirs.tests_conf('test_auth_plugin.conf'))
return config_files
- def config_overrides(self):
- # don't override configs so we can use test_auth_plugin.conf only
+ def auth_plugin_config_override(self, methods=None, **method_classes):
+ # Do not apply the auth plugin overrides so that the config file is
+ # tested
pass
def _test_mapped_invocation_with_method_name(self, method_name):
diff --git a/keystone-moon/keystone/tests/unit/test_backend.py b/keystone-moon/keystone/tests/unit/test_backend.py
index 6cf06494..45b8e0b0 100644
--- a/keystone-moon/keystone/tests/unit/test_backend.py
+++ b/keystone-moon/keystone/tests/unit/test_backend.py
@@ -22,6 +22,7 @@ import mock
from oslo_config import cfg
from oslo_utils import timeutils
import six
+from six.moves import range
from testtools import matchers
from keystone.catalog import core
@@ -505,7 +506,7 @@ class IdentityTests(object):
'fake2')
def test_list_role_assignments_unfiltered(self):
- """Test for unfiltered listing role assignments.
+ """Test unfiltered listing of role assignments.
Test Plan:
@@ -533,9 +534,6 @@ class IdentityTests(object):
# First check how many role grants already exist
existing_assignments = len(self.assignment_api.list_role_assignments())
- existing_assignments_for_role = len(
- self.assignment_api.list_role_assignments_for_role(
- role_id='admin'))
# Now create the grants (roles are defined in default_fixtures)
self.assignment_api.create_grant(user_id=new_user['id'],
@@ -573,6 +571,48 @@ class IdentityTests(object):
'role_id': 'admin'},
assignment_list)
+ def test_list_role_assignments_filtered_by_role(self):
+ """Test listing of role assignments filtered by role ID.
+
+ Test Plan:
+
+ - Create a user, group & project
+ - Find how many role assignments already exist (from default
+ fixtures)
+ - Create a grant of each type (user/group on project/domain)
+ - Check that if we list assignments by role_id, then we get back
+ assignments that only contain that role.
+
+ """
+ new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
+ 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID}
+ new_user = self.identity_api.create_user(new_user)
+ new_group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
+ new_group = self.identity_api.create_group(new_group)
+ new_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ # First check how many role grants already exist
+ existing_assignments_for_role = len(
+ self.assignment_api.list_role_assignments_for_role(
+ role_id='admin'))
+
+ # Now create the grants (roles are defined in default_fixtures)
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ domain_id=DEFAULT_DOMAIN_ID,
+ role_id='member')
+ self.assignment_api.create_grant(user_id=new_user['id'],
+ project_id=new_project['id'],
+ role_id='other')
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ domain_id=DEFAULT_DOMAIN_ID,
+ role_id='admin')
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ project_id=new_project['id'],
+ role_id='admin')
+
# Read back the list of assignments for just the admin role, checking
# this only goes up by two.
assignment_list = self.assignment_api.list_role_assignments_for_role(
@@ -582,7 +622,7 @@ class IdentityTests(object):
# Now check that each of our two new entries are in the list
self.assertIn(
- {'group_id': new_group['id'], 'domain_id': new_domain['id'],
+ {'group_id': new_group['id'], 'domain_id': DEFAULT_DOMAIN_ID,
'role_id': 'admin'},
assignment_list)
self.assertIn(
@@ -598,8 +638,7 @@ class IdentityTests(object):
def get_member_assignments():
assignments = self.assignment_api.list_role_assignments()
- return filter(lambda x: x['role_id'] == MEMBER_ROLE_ID,
- assignments)
+ return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID]
orig_member_assignments = get_member_assignments()
@@ -627,8 +666,8 @@ class IdentityTests(object):
expected_member_assignments = orig_member_assignments + [{
'group_id': new_group['id'], 'project_id': new_project['id'],
'role_id': MEMBER_ROLE_ID}]
- self.assertThat(new_member_assignments,
- matchers.Equals(expected_member_assignments))
+ self.assertItemsEqual(expected_member_assignments,
+ new_member_assignments)
def test_list_role_assignments_bad_role(self):
assignment_list = self.assignment_api.list_role_assignments_for_role(
@@ -1976,6 +2015,16 @@ class IdentityTests(object):
project['id'],
project)
+ def test_create_project_invalid_domain_id(self):
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': uuid.uuid4().hex,
+ 'enabled': True}
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
def test_create_user_invalid_enabled_type_string(self):
user = {'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
@@ -2079,7 +2128,7 @@ class IdentityTests(object):
# Create a project
project = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
'name': uuid.uuid4().hex, 'description': uuid.uuid4().hex,
- 'enabled': True, 'parent_id': None}
+ 'enabled': True, 'parent_id': None, 'is_domain': False}
self.resource_api.create_project(project['id'], project)
# Build driver hints with the project's name and inexistent description
@@ -2131,12 +2180,15 @@ class IdentityTests(object):
self.assertIn(project2['id'], project_ids)
def _create_projects_hierarchy(self, hierarchy_size=2,
- domain_id=DEFAULT_DOMAIN_ID):
+ domain_id=DEFAULT_DOMAIN_ID,
+ is_domain=False):
"""Creates a project hierarchy with specified size.
:param hierarchy_size: the desired hierarchy size, default is 2 -
a project with one child.
:param domain_id: domain where the projects hierarchy will be created.
+ :param is_domain: if the hierarchy will have the is_domain flag active
+ or not.
:returns projects: a list of the projects in the created hierarchy.
@@ -2144,26 +2196,195 @@ class IdentityTests(object):
project_id = uuid.uuid4().hex
project = {'id': project_id,
'description': '',
- 'domain_id': domain_id,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': None}
+ 'parent_id': None,
+ 'domain_id': domain_id,
+ 'is_domain': is_domain}
self.resource_api.create_project(project_id, project)
projects = [project]
for i in range(1, hierarchy_size):
new_project = {'id': uuid.uuid4().hex,
'description': '',
- 'domain_id': domain_id,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': project_id}
+ 'parent_id': project_id,
+ 'is_domain': is_domain}
+ new_project['domain_id'] = domain_id
+
self.resource_api.create_project(new_project['id'], new_project)
projects.append(new_project)
project_id = new_project['id']
return projects
+ @tests.skip_if_no_multiple_domains_support
+ def test_create_domain_with_project_api(self):
+ project_id = uuid.uuid4().hex
+ project = {'id': project_id,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None,
+ 'is_domain': True}
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertTrue(ref['is_domain'])
+ self.assertEqual(DEFAULT_DOMAIN_ID, ref['domain_id'])
+
+ @tests.skip_if_no_multiple_domains_support
+ @test_utils.wip('waiting for projects acting as domains implementation')
+ def test_is_domain_sub_project_has_parent_domain_id(self):
+ project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None,
+ 'is_domain': True}
+ self.resource_api.create_project(project['id'], project)
+
+ sub_project_id = uuid.uuid4().hex
+ sub_project = {'id': sub_project_id,
+ 'description': '',
+ 'domain_id': project['id'],
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': project['id'],
+ 'is_domain': True}
+ ref = self.resource_api.create_project(sub_project['id'], sub_project)
+ self.assertTrue(ref['is_domain'])
+ self.assertEqual(project['id'], ref['parent_id'])
+ self.assertEqual(project['id'], ref['domain_id'])
+
+ @tests.skip_if_no_multiple_domains_support
+ @test_utils.wip('waiting for projects acting as domains implementation')
+ def test_delete_domain_with_project_api(self):
+ project_id = uuid.uuid4().hex
+ project = {'id': project_id,
+ 'description': '',
+ 'domain_id': None,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None,
+ 'is_domain': True}
+ self.resource_api.create_project(project['id'], project)
+
+ # Try to delete is_domain project that is enabled
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.delete_project,
+ project['id'])
+
+ # Disable the project
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
+
+        # Successfully delete the project
+ self.resource_api.delete_project(project['id'])
+
+ @tests.skip_if_no_multiple_domains_support
+ @test_utils.wip('waiting for projects acting as domains implementation')
+ def test_create_domain_under_regular_project_hierarchy_fails(self):
+ # Creating a regular project hierarchy. Projects acting as domains
+ # can't have a parent that is a regular project.
+ projects_hierarchy = self._create_projects_hierarchy()
+ parent = projects_hierarchy[1]
+ project_id = uuid.uuid4().hex
+ project = {'id': project_id,
+ 'description': '',
+ 'domain_id': parent['id'],
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': parent['id'],
+ 'is_domain': True}
+
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'], project)
+
+ @tests.skip_if_no_multiple_domains_support
+ @test_utils.wip('waiting for projects acting as domains implementation')
+ def test_create_project_under_domain_hierarchy(self):
+ projects_hierarchy = self._create_projects_hierarchy(is_domain=True)
+ parent = projects_hierarchy[1]
+ project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': parent['id'],
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': parent['id'],
+ 'is_domain': False}
+
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertFalse(ref['is_domain'])
+ self.assertEqual(parent['id'], ref['parent_id'])
+ self.assertEqual(parent['id'], ref['domain_id'])
+
+ def test_create_project_without_is_domain_flag(self):
+ project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None}
+
+ ref = self.resource_api.create_project(project['id'], project)
+ # The is_domain flag should be False by default
+ self.assertFalse(ref['is_domain'])
+
+ def test_create_is_domain_project(self):
+ project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None,
+ 'is_domain': True}
+
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertTrue(ref['is_domain'])
+
+ @test_utils.wip('waiting for projects acting as domains implementation')
+ def test_create_project_with_parent_id_and_without_domain_id(self):
+ project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': None,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None}
+ self.resource_api.create_project(project['id'], project)
+
+ sub_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': project['id']}
+ ref = self.resource_api.create_project(sub_project['id'], sub_project)
+
+ # The domain_id should be set to the parent domain_id
+ self.assertEqual(project['domain_id'], ref['domain_id'])
+
+ @test_utils.wip('waiting for projects acting as domains implementation')
+ def test_create_project_with_domain_id_and_without_parent_id(self):
+ project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': None,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': None}
+ self.resource_api.create_project(project['id'], project)
+
+ sub_project = {'id': uuid.uuid4().hex,
+ 'description': '',
+ 'enabled': True,
+ 'domain_id': project['id'],
+ 'name': uuid.uuid4().hex}
+ ref = self.resource_api.create_project(sub_project['id'], sub_project)
+
+ # The parent_id should be set to the domain_id
+ self.assertEqual(ref['parent_id'], project['id'])
+
def test_check_leaf_projects(self):
projects_hierarchy = self._create_projects_hierarchy()
root_project = projects_hierarchy[0]
@@ -2191,7 +2412,8 @@ class IdentityTests(object):
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': project2['id']}
+ 'parent_id': project2['id'],
+ 'is_domain': False}
self.resource_api.create_project(project4['id'], project4)
subtree = self.resource_api.list_projects_in_subtree(project1['id'])
@@ -2208,6 +2430,48 @@ class IdentityTests(object):
subtree = self.resource_api.list_projects_in_subtree(project3['id'])
self.assertEqual(0, len(subtree))
+ def test_list_projects_in_subtree_with_circular_reference(self):
+ project1_id = uuid.uuid4().hex
+ project2_id = uuid.uuid4().hex
+
+ project1 = {'id': project1_id,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex}
+ self.resource_api.create_project(project1['id'], project1)
+
+ project2 = {'id': project2_id,
+ 'description': '',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'enabled': True,
+ 'name': uuid.uuid4().hex,
+ 'parent_id': project1_id}
+ self.resource_api.create_project(project2['id'], project2)
+
+ project1['parent_id'] = project2_id # Adds cyclic reference
+
+ # NOTE(dstanek): The manager does not allow parent_id to be updated.
+ # Instead will directly use the driver to create the cyclic
+ # reference.
+ self.resource_api.driver.update_project(project1_id, project1)
+
+ subtree = self.resource_api.list_projects_in_subtree(project1_id)
+
+        # NOTE(dstanek): If a cyclic reference is detected the code bails
+ # and returns None instead of falling into the infinite
+ # recursion trap.
+ self.assertIsNone(subtree)
+
+ def test_list_projects_in_subtree_invalid_project_id(self):
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.list_projects_in_subtree,
+ None)
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.list_projects_in_subtree,
+ uuid.uuid4().hex)
+
def test_list_project_parents(self):
projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
project1 = projects_hierarchy[0]
@@ -2218,7 +2482,8 @@ class IdentityTests(object):
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': project2['id']}
+ 'parent_id': project2['id'],
+ 'is_domain': False}
self.resource_api.create_project(project4['id'], project4)
parents1 = self.resource_api.list_project_parents(project3['id'])
@@ -2232,6 +2497,15 @@ class IdentityTests(object):
parents = self.resource_api.list_project_parents(project1['id'])
self.assertEqual(0, len(parents))
+ def test_list_project_parents_invalid_project_id(self):
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.list_project_parents,
+ None)
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.list_project_parents,
+ uuid.uuid4().hex)
+
def test_delete_project_with_role_assignments(self):
tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID}
@@ -2812,29 +3086,36 @@ class IdentityTests(object):
'description': '',
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
- 'parent_id': 'fake'}
+ 'parent_id': 'fake',
+ 'is_domain': False}
self.assertRaises(exception.ProjectNotFound,
self.resource_api.create_project,
project['id'],
project)
- def test_create_leaf_project_with_invalid_domain(self):
+ @tests.skip_if_no_multiple_domains_support
+ def test_create_leaf_project_with_different_domain(self):
root_project = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': '',
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(root_project['id'], root_project)
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.resource_api.create_domain(domain['id'], domain)
leaf_project = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': '',
- 'domain_id': 'fake',
+ 'domain_id': domain['id'],
'enabled': True,
- 'parent_id': root_project['id']}
+ 'parent_id': root_project['id'],
+ 'is_domain': False}
- self.assertRaises(exception.ForbiddenAction,
+ self.assertRaises(exception.ValidationError,
self.resource_api.create_project,
leaf_project['id'],
leaf_project)
@@ -2883,17 +3164,19 @@ class IdentityTests(object):
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': False,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(project1['id'], project1)
project2 = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': project1['id']}
+ 'parent_id': project1['id'],
+ 'is_domain': False}
# It's not possible to create a project under a disabled one in the
# hierarchy
- self.assertRaises(exception.ForbiddenAction,
+ self.assertRaises(exception.ValidationError,
self.resource_api.create_project,
project2['id'],
project2)
@@ -2955,7 +3238,8 @@ class IdentityTests(object):
'id': project_id,
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': leaf_project['id']}
+ 'parent_id': leaf_project['id'],
+ 'is_domain': False}
self.assertRaises(exception.ForbiddenAction,
self.resource_api.create_project,
project_id,
@@ -2967,7 +3251,8 @@ class IdentityTests(object):
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(project['id'], project)
# Add a description attribute.
@@ -2983,7 +3268,8 @@ class IdentityTests(object):
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(project['id'], project)
# Add a description attribute.
@@ -3427,8 +3713,7 @@ class IdentityTests(object):
def get_member_assignments():
assignments = self.assignment_api.list_role_assignments()
- return filter(lambda x: x['role_id'] == MEMBER_ROLE_ID,
- assignments)
+ return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID]
orig_member_assignments = get_member_assignments()
@@ -3662,16 +3947,16 @@ class IdentityTests(object):
domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.resource_api.create_domain(domain2['id'], domain2)
project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
+ 'domain_id': domain1['id'], 'is_domain': False}
project1 = self.resource_api.create_project(project1['id'], project1)
project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
+ 'domain_id': domain1['id'], 'is_domain': False}
project2 = self.resource_api.create_project(project2['id'], project2)
project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
+ 'domain_id': domain1['id'], 'is_domain': False}
project3 = self.resource_api.create_project(project3['id'], project3)
project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id']}
+ 'domain_id': domain2['id'], 'is_domain': False}
project4 = self.resource_api.create_project(project4['id'], project4)
group_list = []
role_list = []
@@ -4291,7 +4576,9 @@ class TrustTests(object):
trust_data = self.trust_api.get_trust(trust_id)
self.assertEqual(new_id, trust_data['id'])
self.trust_api.delete_trust(trust_id)
- self.assertIsNone(self.trust_api.get_trust(trust_id))
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.get_trust,
+ trust_id)
def test_delete_trust_not_found(self):
trust_id = uuid.uuid4().hex
@@ -4314,7 +4601,9 @@ class TrustTests(object):
self.assertIsNotNone(trust_data)
self.assertIsNone(trust_data['deleted_at'])
self.trust_api.delete_trust(new_id)
- self.assertIsNone(self.trust_api.get_trust(new_id))
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.get_trust,
+ new_id)
deleted_trust = self.trust_api.get_trust(trust_data['id'],
deleted=True)
self.assertEqual(trust_data['id'], deleted_trust['id'])
@@ -4389,7 +4678,9 @@ class TrustTests(object):
self.assertEqual(1, t['remaining_uses'])
self.trust_api.consume_use(trust_data['id'])
# This was the last use, the trust isn't available anymore
- self.assertIsNone(self.trust_api.get_trust(trust_data['id']))
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.get_trust,
+ trust_data['id'])
class CatalogTests(object):
@@ -4907,7 +5198,6 @@ class CatalogTests(object):
endpoint = {
'id': uuid.uuid4().hex,
- 'region_id': None,
'service_id': service['id'],
'interface': 'public',
'url': uuid.uuid4().hex,
@@ -5007,6 +5297,29 @@ class CatalogTests(object):
return service_ref, enabled_endpoint_ref, disabled_endpoint_ref
+ def test_list_endpoints(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ expected_ids = set([uuid.uuid4().hex for _ in range(3)])
+ for endpoint_id in expected_ids:
+ endpoint = {
+ 'id': endpoint_id,
+ 'region_id': None,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ endpoints = self.catalog_api.list_endpoints()
+ self.assertEqual(expected_ids, set(e['id'] for e in endpoints))
+
def test_get_catalog_endpoint_disabled(self):
"""Get back only enabled endpoints when get the v2 catalog."""
@@ -5157,6 +5470,77 @@ class PolicyTests(object):
class InheritanceTests(object):
+ def _test_crud_inherited_and_direct_assignment(self, **kwargs):
+ """Tests inherited and direct assignments for the actor and target
+
+ Ensure it is possible to create both inherited and direct role
+ assignments for the same actor on the same target. The actor and the
+ target are specified in the kwargs as ('user_id' or 'group_id') and
+ ('project_id' or 'domain_id'), respectively.
+
+ """
+
+ # Create a new role to avoid assignments loaded from default fixtures
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = self.role_api.create_role(role['id'], role)
+
+        # Define the common assignment entity
+ assignment_entity = {'role_id': role['id']}
+ assignment_entity.update(kwargs)
+
+ # Define assignments under test
+ direct_assignment_entity = assignment_entity.copy()
+ inherited_assignment_entity = assignment_entity.copy()
+ inherited_assignment_entity['inherited_to_projects'] = 'projects'
+
+ # Create direct assignment and check grants
+ self.assignment_api.create_grant(inherited_to_projects=False,
+ **assignment_entity)
+
+ grants = self.assignment_api.list_role_assignments_for_role(role['id'])
+ self.assertThat(grants, matchers.HasLength(1))
+ self.assertIn(direct_assignment_entity, grants)
+
+ # Now add inherited assignment and check grants
+ self.assignment_api.create_grant(inherited_to_projects=True,
+ **assignment_entity)
+
+ grants = self.assignment_api.list_role_assignments_for_role(role['id'])
+ self.assertThat(grants, matchers.HasLength(2))
+ self.assertIn(direct_assignment_entity, grants)
+ self.assertIn(inherited_assignment_entity, grants)
+
+ # Delete both and check grants
+ self.assignment_api.delete_grant(inherited_to_projects=False,
+ **assignment_entity)
+ self.assignment_api.delete_grant(inherited_to_projects=True,
+ **assignment_entity)
+
+ grants = self.assignment_api.list_role_assignments_for_role(role['id'])
+ self.assertEqual([], grants)
+
+ def test_crud_inherited_and_direct_assignment_for_user_on_domain(self):
+ self._test_crud_inherited_and_direct_assignment(
+ user_id=self.user_foo['id'], domain_id=DEFAULT_DOMAIN_ID)
+
+ def test_crud_inherited_and_direct_assignment_for_group_on_domain(self):
+ group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
+ group = self.identity_api.create_group(group)
+
+ self._test_crud_inherited_and_direct_assignment(
+ group_id=group['id'], domain_id=DEFAULT_DOMAIN_ID)
+
+ def test_crud_inherited_and_direct_assignment_for_user_on_project(self):
+ self._test_crud_inherited_and_direct_assignment(
+ user_id=self.user_foo['id'], project_id=self.tenant_baz['id'])
+
+ def test_crud_inherited_and_direct_assignment_for_group_on_project(self):
+ group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
+ group = self.identity_api.create_group(group)
+
+ self._test_crud_inherited_and_direct_assignment(
+ group_id=group['id'], project_id=self.tenant_baz['id'])
+
def test_inherited_role_grants_for_user(self):
"""Test inherited user roles.
@@ -5375,14 +5759,16 @@ class InheritanceTests(object):
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(root_project['id'], root_project)
leaf_project = {'id': uuid.uuid4().hex,
'description': '',
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': root_project['id']}
+ 'parent_id': root_project['id'],
+ 'is_domain': False}
self.resource_api.create_project(leaf_project['id'], leaf_project)
user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
@@ -5496,14 +5882,16 @@ class InheritanceTests(object):
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(root_project['id'], root_project)
leaf_project = {'id': uuid.uuid4().hex,
'description': '',
'domain_id': DEFAULT_DOMAIN_ID,
'enabled': True,
'name': uuid.uuid4().hex,
- 'parent_id': root_project['id']}
+ 'parent_id': root_project['id'],
+ 'is_domain': False}
self.resource_api.create_project(leaf_project['id'], leaf_project)
user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
@@ -5663,6 +6051,65 @@ class FilterTests(filtering.FilterTests):
self._delete_test_data('user', user_list)
self._delete_test_data('group', group_list)
+ def _get_user_name_field_size(self):
+ """Return the size of the user name field for the backend.
+
+ Subclasses can override this method to indicate that the user name
+ field is limited in length. The user name is the field used in the test
+ that validates that a filter value works even if it's longer than a
+ field.
+
+ If the backend doesn't limit the value length then return None.
+
+ """
+ return None
+
+ def test_filter_value_wider_than_field(self):
+ # If a filter value is given that's larger than the field in the
+ # backend then no values are returned.
+
+ user_name_field_size = self._get_user_name_field_size()
+
+ if user_name_field_size is None:
+ # The backend doesn't limit the size of the user name, so pass this
+ # test.
+ return
+
+        # Create some users just to make sure something would be returned if
+        # the filter was ignored.
+ self._create_test_data('user', 2)
+
+ hints = driver_hints.Hints()
+ value = 'A' * (user_name_field_size + 1)
+ hints.add_filter('name', value)
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual([], users)
+
+ def test_list_users_in_group_filtered(self):
+ number_of_users = 10
+ user_name_data = {
+ 1: 'Arthur Conan Doyle',
+ 3: 'Arthur Rimbaud',
+ 9: 'Arthur Schopenhauer',
+ }
+ user_list = self._create_test_data(
+ 'user', number_of_users,
+ domain_id=DEFAULT_DOMAIN_ID, name_dict=user_name_data)
+ group = self._create_one_entity('group',
+ DEFAULT_DOMAIN_ID, 'Great Writers')
+ for i in range(7):
+ self.identity_api.add_user_to_group(user_list[i]['id'],
+ group['id'])
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'Arthur', comparator='startswith')
+ users = self.identity_api.list_users_in_group(group['id'], hints=hints)
+ self.assertThat(len(users), matchers.Equals(2))
+ self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']])
+ self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']])
+ self._delete_test_data('user', user_list)
+ self._delete_entity('group')(group['id'])
+
class LimitTests(filtering.FilterTests):
ENTITIES = ['user', 'group', 'project']
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
index cc41d977..6c2181aa 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
@@ -14,6 +14,7 @@
import uuid
+from six.moves import range
from testtools import matchers
from keystone import exception
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
index dab02859..134a03f0 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
@@ -21,7 +21,8 @@ class SqlPolicyAssociationTable(test_backend_sql.SqlModels):
"""Set of tests for checking SQL Policy Association Mapping."""
def test_policy_association_mapping(self):
- cols = (('policy_id', sql.String, 64),
+ cols = (('id', sql.String, 64),
+ ('policy_id', sql.String, 64),
('endpoint_id', sql.String, 64),
('service_id', sql.String, 64),
('region_id', sql.String, 64))
diff --git a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py b/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
index 48ebad6c..995c564d 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
@@ -21,11 +21,15 @@ class SqlFederation(test_backend_sql.SqlModels):
def test_identity_provider(self):
cols = (('id', sql.String, 64),
- ('remote_id', sql.String, 256),
('enabled', sql.Boolean, None),
('description', sql.Text, None))
self.assertExpectedSchema('identity_provider', cols)
+ def test_idp_remote_ids(self):
+ cols = (('idp_id', sql.String, 64),
+ ('remote_id', sql.String, 255))
+ self.assertExpectedSchema('idp_remote_ids', cols)
+
def test_federated_protocol(self):
cols = (('id', sql.String, 64),
('idp_id', sql.String, 64),
@@ -42,5 +46,6 @@ class SqlFederation(test_backend_sql.SqlModels):
('id', sql.String, 64),
('enabled', sql.Boolean, None),
('description', sql.Text, None),
+ ('relay_state_prefix', sql.String, 256),
('sp_url', sql.String, 256))
self.assertExpectedSchema('service_provider', cols)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_kvs.py b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
index c0997ad9..a22faa59 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_kvs.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
@@ -18,6 +18,7 @@ from oslo_config import cfg
from oslo_utils import timeutils
import six
+from keystone.common import utils
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit import test_backend
@@ -67,13 +68,13 @@ class KvsToken(tests.TestCase, test_backend.TokenTests):
valid_token_ref = token_persistence.get_token(valid_token_id)
expired_token_ref = token_persistence.get_token(expired_token_id)
expected_user_token_list = [
- (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
- subsecond=True)),
- (expired_token_id, timeutils.isotime(expired_token_ref['expires'],
- subsecond=True))]
+ (valid_token_id, utils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (expired_token_id, utils.isotime(expired_token_ref['expires'],
+ subsecond=True))]
self.assertEqual(expected_user_token_list, user_token_list)
new_expired_data = (expired_token_id,
- timeutils.isotime(
+ utils.isotime(
(timeutils.utcnow() - expire_delta),
subsecond=True))
self._update_user_token_index_direct(user_key, expired_token_id,
@@ -82,10 +83,10 @@ class KvsToken(tests.TestCase, test_backend.TokenTests):
user_id=user_id)
valid_token_ref_2 = token_persistence.get_token(valid_token_id_2)
expected_user_token_list = [
- (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
- subsecond=True)),
- (valid_token_id_2, timeutils.isotime(valid_token_ref_2['expires'],
- subsecond=True))]
+ (valid_token_id, utils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (valid_token_id_2, utils.isotime(valid_token_ref_2['expires'],
+ subsecond=True))]
user_token_list = token_persistence.driver._store.get(user_key)
self.assertEqual(expected_user_token_list, user_token_list)
@@ -94,10 +95,10 @@ class KvsToken(tests.TestCase, test_backend.TokenTests):
new_token_id, data = self.create_token_sample_data(user_id=user_id)
new_token_ref = token_persistence.get_token(new_token_id)
expected_user_token_list = [
- (valid_token_id, timeutils.isotime(valid_token_ref['expires'],
- subsecond=True)),
- (new_token_id, timeutils.isotime(new_token_ref['expires'],
- subsecond=True))]
+ (valid_token_id, utils.isotime(valid_token_ref['expires'],
+ subsecond=True)),
+ (new_token_id, utils.isotime(new_token_ref['expires'],
+ subsecond=True))]
user_token_list = token_persistence.driver._store.get(user_key)
self.assertEqual(expected_user_token_list, user_token_list)
@@ -110,9 +111,7 @@ class KvsCatalog(tests.TestCase, test_backend.CatalogTests):
def config_overrides(self):
super(KvsCatalog, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='keystone.catalog.backends.kvs.Catalog')
+ self.config_fixture.config(group='catalog', driver='kvs')
def _load_fake_catalog(self):
self.catalog_foobar = self.catalog_api.driver._create_catalog(
@@ -167,6 +166,4 @@ class KvsTokenCacheInvalidation(tests.TestCase,
def config_overrides(self):
super(KvsTokenCacheInvalidation, self).config_overrides()
- self.config_fixture.config(
- group='token',
- driver='keystone.token.persistence.backends.kvs.Token')
+ self.config_fixture.config(group='token', driver='kvs')
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap.py b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
index 10119808..94fb82e7 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_ldap.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
@@ -20,27 +20,92 @@ import uuid
import ldap
import mock
from oslo_config import cfg
+import pkg_resources
+from six.moves import range
from testtools import matchers
from keystone.common import cache
from keystone.common import ldap as common_ldap
from keystone.common.ldap import core as common_ldap_core
-from keystone.common import sql
from keystone import exception
from keystone import identity
from keystone.identity.mapping_backends import mapping as map
from keystone import resource
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import fakeldap
from keystone.tests.unit import identity_mapping as mapping_sql
from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit.ksfixtures import ldapdb
from keystone.tests.unit import test_backend
CONF = cfg.CONF
+def _assert_backends(testcase, **kwargs):
+
+ def _get_backend_cls(testcase, subsystem):
+ observed_backend = getattr(testcase, subsystem + '_api').driver
+ return observed_backend.__class__
+
+ def _get_domain_specific_backend_cls(manager, domain):
+ observed_backend = manager.domain_configs.get_domain_driver(domain)
+ return observed_backend.__class__
+
+ def _get_entrypoint_cls(subsystem, name):
+ entrypoint = entrypoint_map['keystone.' + subsystem][name]
+ return entrypoint.resolve()
+
+ def _load_domain_specific_configs(manager):
+ if (not manager.domain_configs.configured and
+ CONF.identity.domain_specific_drivers_enabled):
+ manager.domain_configs.setup_domain_drivers(
+ manager.driver, manager.resource_api)
+
+ def _assert_equal(expected_cls, observed_cls, subsystem,
+ domain=None):
+ msg = ('subsystem %(subsystem)s expected %(expected_cls)r, '
+ 'but observed %(observed_cls)r')
+ if domain:
+ subsystem = '%s[domain=%s]' % (subsystem, domain)
+ assert expected_cls == observed_cls, msg % {
+ 'expected_cls': expected_cls,
+ 'observed_cls': observed_cls,
+ 'subsystem': subsystem,
+ }
+
+ env = pkg_resources.Environment()
+ keystone_dist = env['keystone'][0]
+ entrypoint_map = pkg_resources.get_entry_map(keystone_dist)
+
+ for subsystem, entrypoint_name in kwargs.items():
+ if isinstance(entrypoint_name, str):
+ observed_cls = _get_backend_cls(testcase, subsystem)
+ expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name)
+ _assert_equal(expected_cls, observed_cls, subsystem)
+
+ elif isinstance(entrypoint_name, dict):
+ manager = getattr(testcase, subsystem + '_api')
+ _load_domain_specific_configs(manager)
+
+ for domain, entrypoint_name in entrypoint_name.items():
+ if domain is None:
+ observed_cls = _get_backend_cls(testcase, subsystem)
+ expected_cls = _get_entrypoint_cls(
+ subsystem, entrypoint_name)
+ _assert_equal(expected_cls, observed_cls, subsystem)
+ continue
+
+ observed_cls = _get_domain_specific_backend_cls(
+ manager, domain)
+ expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name)
+ _assert_equal(expected_cls, observed_cls, subsystem, domain)
+
+ else:
+ raise ValueError('%r is not an expected value for entrypoint name'
+ % entrypoint_name)
+
+
def create_group_container(identity_api):
# Create the groups base entry (ou=Groups,cn=example,cn=com)
group_api = identity_api.driver.group
@@ -54,35 +119,22 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def setUp(self):
super(BaseLDAPIdentity, self).setUp()
- self.clear_database()
+ self.ldapdb = self.useFixture(ldapdb.LDAPDatabase())
- common_ldap.register_handler('fake://', fakeldap.FakeLdap)
self.load_backends()
self.load_fixtures(default_fixtures)
- self.addCleanup(common_ldap_core._HANDLERS.clear)
-
def _get_domain_fixture(self):
"""Domains in LDAP are read-only, so just return the static one."""
return self.resource_api.get_domain(CONF.identity.default_domain_id)
- def clear_database(self):
- for shelf in fakeldap.FakeShelves:
- fakeldap.FakeShelves[shelf].clear()
-
- def reload_backends(self, domain_id):
- # Only one backend unless we are using separate domain backends
- self.load_backends()
-
def get_config(self, domain_id):
# Only one conf structure unless we are using separate domain backends
return CONF
def config_overrides(self):
super(BaseLDAPIdentity, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(group='identity', driver='ldap')
def config_files(self):
config_files = super(BaseLDAPIdentity, self).config_files()
@@ -127,11 +179,11 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
user['id'])
def test_configurable_forbidden_user_actions(self):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_allow_create = False
- conf.ldap.user_allow_update = False
- conf.ldap.user_allow_delete = False
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.allow_create = False
+ driver.user.allow_update = False
+ driver.user.allow_delete = False
user = {'name': u'fäké1',
'password': u'fäképass1',
@@ -152,9 +204,9 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.user_foo['id'])
def test_configurable_forbidden_create_existing_user(self):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_allow_create = False
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.allow_create = False
self.assertRaises(exception.ForbiddenAction,
self.identity_api.create_user,
@@ -165,9 +217,9 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.user_foo.pop('password')
self.assertDictEqual(user_ref, self.user_foo)
- conf = self.get_config(user_ref['domain_id'])
- conf.ldap.user_filter = '(CN=DOES_NOT_MATCH)'
- self.reload_backends(user_ref['domain_id'])
+ driver = self.identity_api._select_identity_driver(
+ user_ref['domain_id'])
+ driver.user.ldap_filter = '(CN=DOES_NOT_MATCH)'
# invalidate the cache if the result is cached.
self.identity_api.get_user.invalidate(self.identity_api,
self.user_foo['id'])
@@ -468,9 +520,16 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
after_assignments = len(self.assignment_api.list_role_assignments())
self.assertEqual(existing_assignments + 2, after_assignments)
+ def test_list_role_assignments_filtered_by_role(self):
+ # Domain roles are not supported by the LDAP Assignment backend
+ self.assertRaises(
+ exception.NotImplemented,
+ super(BaseLDAPIdentity, self).
+ test_list_role_assignments_filtered_by_role)
+
def test_list_role_assignments_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -495,7 +554,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_list_user_ids_for_project_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -569,7 +628,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_list_group_members_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -686,11 +745,10 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_create_user_none_mapping(self):
# When create a user where an attribute maps to None, the entry is
# created without that attribute and it doesn't fail with a TypeError.
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_attribute_ignore = ['enabled', 'email',
- 'tenants', 'tenantId']
- self.reload_backends(CONF.identity.default_domain_id)
-
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.attribute_ignore = ['enabled', 'email',
+ 'tenants', 'tenantId']
user = {'name': u'fäké1',
'password': u'fäképass1',
'domain_id': CONF.identity.default_domain_id,
@@ -723,10 +781,10 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
# Ensure that an attribute that maps to None that is not explicitly
# ignored in configuration is implicitly ignored without triggering
# an error.
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_attribute_ignore = ['enabled', 'email',
- 'tenants', 'tenantId']
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.attribute_ignore = ['enabled', 'email',
+ 'tenants', 'tenantId']
user = {'name': u'fäké1',
'password': u'fäképass1',
@@ -930,6 +988,10 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
# credentials) that require a database.
self.useFixture(database.Database())
super(LDAPIdentity, self).setUp()
+ _assert_backends(self,
+ assignment='ldap',
+ identity='ldap',
+ resource='ldap')
def load_fixtures(self, fixtures):
# Override super impl since need to create group container.
@@ -937,7 +999,9 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
super(LDAPIdentity, self).load_fixtures(fixtures)
def test_configurable_allowed_project_actions(self):
- tenant = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True}
+ domain = self._get_domain_fixture()
+ tenant = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True,
+ 'domain_id': domain['id']}
self.resource_api.create_project(u'fäké1', tenant)
tenant_ref = self.resource_api.get_project(u'fäké1')
self.assertEqual(u'fäké1', tenant_ref['id'])
@@ -990,7 +1054,8 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
project_allow_update=False, project_allow_delete=False)
self.load_backends()
- tenant = {'id': u'fäké1', 'name': u'fäké1'}
+ domain = self._get_domain_fixture()
+ tenant = {'id': u'fäké1', 'name': u'fäké1', 'domain_id': domain['id']}
self.assertRaises(exception.ForbiddenAction,
self.resource_api.create_project,
u'fäké1',
@@ -1029,7 +1094,7 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
def test_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
@@ -1042,7 +1107,7 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
group='ldap', project_name_attribute='ou',
project_desc_attribute='description',
project_enabled_attribute='enabled')
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
# NOTE(morganfainberg): CONF.ldap.project_name_attribute,
@@ -1087,7 +1152,7 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
self.config_fixture.config(
group='ldap',
project_attribute_ignore=['name', 'description', 'enabled'])
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
# NOTE(morganfainberg): CONF.ldap.project_attribute_ignore will not be
@@ -1107,7 +1172,7 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
def test_user_enable_attribute_mask(self):
self.config_fixture.config(group='ldap', user_enabled_mask=2,
user_enabled_default='512')
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -1155,7 +1220,7 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
def test_user_enabled_invert(self):
self.config_fixture.config(group='ldap', user_enabled_invert=True,
user_enabled_default=False)
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -1426,6 +1491,26 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
new_user = [u for u in res if u['id'] == user['id']][0]
self.assertThat(new_user['description'], matchers.Equals(description))
+ def test_user_with_missing_id(self):
+ # create a user that doesn't have the id attribute
+ ldap_ = self.identity_api.driver.user.get_connection()
+ # `sn` is used for the attribute in the DN because it's allowed by
+ # the entry's objectclasses so that this test could conceivably run in
+ # the live tests.
+ ldap_id_field = 'sn'
+ ldap_id_value = uuid.uuid4().hex
+ dn = '%s=%s,ou=Users,cn=example,cn=com' % (ldap_id_field,
+ ldap_id_value)
+ modlist = [('objectClass', ['person', 'inetOrgPerson']),
+ (ldap_id_field, [ldap_id_value]),
+ ('mail', ['email@example.com']),
+ ('userPassword', [uuid.uuid4().hex])]
+ ldap_.add_s(dn, modlist)
+
+ # make sure the user doesn't break other users
+ users = self.identity_api.driver.user.get_all()
+ self.assertThat(users, matchers.HasLength(len(default_fixtures.USERS)))
+
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_mixed_case_attribute(self, mock_ldap_get):
# Mock the search results to return attribute names
@@ -1531,7 +1616,8 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
'domain_id': CONF.identity.default_domain_id,
'description': uuid.uuid4().hex,
'enabled': True,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
@@ -1609,7 +1695,8 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
'description': '',
'domain_id': domain['id'],
'enabled': True,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(project1['id'], project1)
# Creating project2 under project1. LDAP will not allow
@@ -1619,7 +1706,8 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
'description': '',
'domain_id': domain['id'],
'enabled': True,
- 'parent_id': project1['id']}
+ 'parent_id': project1['id'],
+ 'is_domain': False}
self.assertRaises(exception.InvalidParentProject,
self.resource_api.create_project,
@@ -1633,6 +1721,58 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
# Returning projects to be used across the tests
return [project1, project2]
+ def _assert_create_is_domain_project_not_allowed(self):
+ """Tests that we can't create more than one project acting as domain.
+
+ This method will be used at any test that requires the creation of a
+ project that acts as a domain. LDAP does not support multiple domains
+ and the only domain it has (default) is immutable.
+ """
+ domain = self._get_domain_fixture()
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': domain['id'],
+ 'enabled': True,
+ 'parent_id': None,
+ 'is_domain': True}
+
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'], project)
+
+ def test_update_is_domain_field(self):
+ domain = self._get_domain_fixture()
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': '',
+ 'domain_id': domain['id'],
+ 'enabled': True,
+ 'parent_id': None,
+ 'is_domain': False}
+ self.resource_api.create_project(project['id'], project)
+
+ # Try to update the is_domain field to True
+ project['is_domain'] = True
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'], project)
+
+ def test_delete_is_domain_project(self):
+ self._assert_create_is_domain_project_not_allowed()
+
+ def test_create_domain_under_regular_project_hierarchy_fails(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_create_not_is_domain_project_under_is_domain_hierarchy(self):
+ self._assert_create_hierarchy_not_allowed()
+
+ def test_create_is_domain_project(self):
+ self._assert_create_is_domain_project_not_allowed()
+
+ def test_create_project_with_parent_id_and_without_domain_id(self):
+ self._assert_create_hierarchy_not_allowed()
+
def test_check_leaf_projects(self):
projects = self._assert_create_hierarchy_not_allowed()
for project in projects:
@@ -1642,13 +1782,17 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
projects = self._assert_create_hierarchy_not_allowed()
for project in projects:
subtree_list = self.resource_api.list_projects_in_subtree(
- project)
+ project['id'])
self.assertEqual(0, len(subtree_list))
+ def test_list_projects_in_subtree_with_circular_reference(self):
+ self._assert_create_hierarchy_not_allowed()
+
def test_list_project_parents(self):
projects = self._assert_create_hierarchy_not_allowed()
for project in projects:
- parents_list = self.resource_api.list_project_parents(project)
+ parents_list = self.resource_api.list_project_parents(
+ project['id'])
self.assertEqual(0, len(parents_list))
def test_hierarchical_projects_crud(self):
@@ -1826,9 +1970,9 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
self.assertEqual(set(expected_group_ids), group_ids)
def test_user_id_attribute_in_create(self):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_id_attribute = 'mail'
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.id_attr = 'mail'
user = {'name': u'fäké1',
'password': u'fäképass1',
@@ -1840,9 +1984,9 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
self.assertEqual(user_ref['id'], user_ref['email'])
def test_user_id_attribute_map(self):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_id_attribute = 'mail'
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.id_attr = 'mail'
user_ref = self.identity_api.get_user(self.user_foo['email'])
# the user_id_attribute map should be honored, which means
@@ -1851,9 +1995,9 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_get_id_from_dn_for_multivalued_attribute_id(self, mock_ldap_get):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_id_attribute = 'mail'
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.id_attr = 'mail'
# make 'email' multivalued so we can test the error condition
email1 = uuid.uuid4().hex
@@ -1888,10 +2032,10 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_id_not_in_dn(self, mock_ldap_get):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_id_attribute = 'uid'
- conf.ldap.user_name_attribute = 'cn'
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.id_attr = 'uid'
+ driver.user.attribute_mapping['name'] = 'cn'
mock_ldap_get.return_value = (
'foo=bar,dc=example,dc=com',
@@ -1908,10 +2052,10 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_name_in_dn(self, mock_ldap_get):
- conf = self.get_config(CONF.identity.default_domain_id)
- conf.ldap.user_id_attribute = 'sAMAccountName'
- conf.ldap.user_name_attribute = 'cn'
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.id_attr = 'SAMAccountName'
+ driver.user.attribute_mapping['name'] = 'cn'
mock_ldap_get.return_value = (
'cn=Foo Bar,dc=example,dc=com',
@@ -1929,12 +2073,16 @@ class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
class LDAPIdentityEnabledEmulation(LDAPIdentity):
def setUp(self):
super(LDAPIdentityEnabledEmulation, self).setUp()
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
for obj in [self.tenant_bar, self.tenant_baz, self.user_foo,
self.user_two, self.user_badguy]:
obj.setdefault('enabled', True)
+ _assert_backends(self,
+ assignment='ldap',
+ identity='ldap',
+ resource='ldap')
def load_fixtures(self, fixtures):
# Override super impl since need to create group container.
@@ -1961,7 +2109,8 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'description': uuid.uuid4().hex,
- 'parent_id': None}
+ 'parent_id': None,
+ 'is_domain': False}
self.resource_api.create_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
@@ -2007,9 +2156,9 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
user['id'])
def test_user_auth_emulated(self):
- self.config_fixture.config(group='ldap',
- user_enabled_emulation_dn='cn=test,dc=test')
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.user.enabled_emulation_dn = 'cn=test,dc=test'
self.identity_api.authenticate(
context={},
user_id=self.user_foo['id'],
@@ -2022,7 +2171,7 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
def test_user_enabled_invert(self):
self.config_fixture.config(group='ldap', user_enabled_invert=True,
user_enabled_default=False)
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -2110,32 +2259,26 @@ class LdapIdentitySqlAssignment(BaseLDAPIdentity, tests.SQLDriverOverrides,
return config_files
def setUp(self):
- self.useFixture(database.Database())
+ sqldb = self.useFixture(database.Database())
super(LdapIdentitySqlAssignment, self).setUp()
- self.clear_database()
+ self.ldapdb.clear()
self.load_backends()
cache.configure_cache_region(cache.REGION)
- self.engine = sql.get_engine()
- self.addCleanup(sql.cleanup)
-
- sql.ModelBase.metadata.create_all(bind=self.engine)
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+ sqldb.recreate()
self.load_fixtures(default_fixtures)
# defaulted by the data load
self.user_foo['enabled'] = True
+ _assert_backends(self,
+ assignment='sql',
+ identity='ldap',
+ resource='sql')
def config_overrides(self):
super(LdapIdentitySqlAssignment, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
- self.config_fixture.config(
- group='resource',
- driver='keystone.resource.backends.sql.Resource')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='resource', driver='sql')
+ self.config_fixture.config(group='assignment', driver='sql')
def test_domain_crud(self):
pass
@@ -2214,6 +2357,11 @@ class LdapIdentitySqlAssignment(BaseLDAPIdentity, tests.SQLDriverOverrides,
self.skipTest("Doesn't apply since LDAP configuration is ignored for "
"SQL assignment backend.")
+ def test_list_role_assignments_filtered_by_role(self):
+ # Domain roles are supported by the SQL Assignment backend
+ base = super(BaseLDAPIdentity, self)
+ base.test_list_role_assignments_filtered_by_role()
+
class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
"""Class to test mapping of default LDAP backend.
@@ -2390,16 +2538,11 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
"""
def setUp(self):
- self.useFixture(database.Database())
+ sqldb = self.useFixture(database.Database())
super(MultiLDAPandSQLIdentity, self).setUp()
self.load_backends()
-
- self.engine = sql.get_engine()
- self.addCleanup(sql.cleanup)
-
- sql.ModelBase.metadata.create_all(bind=self.engine)
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+ sqldb.recreate()
self.domain_count = 5
self.domain_specific_count = 3
@@ -2410,23 +2553,29 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
# for separate backends per domain.
self.enable_multi_domain()
- self.clear_database()
+ self.ldapdb.clear()
self.load_fixtures(default_fixtures)
self.create_users_across_domains()
+ self.assert_backends()
+
+ def assert_backends(self):
+ _assert_backends(self,
+ assignment='sql',
+ identity={
+ None: 'sql',
+ self.domains['domain_default']['id']: 'ldap',
+ self.domains['domain1']['id']: 'ldap',
+ self.domains['domain2']['id']: 'ldap',
+ },
+ resource='sql')
def config_overrides(self):
super(MultiLDAPandSQLIdentity, self).config_overrides()
# Make sure identity and assignment are actually SQL drivers,
# BaseLDAPIdentity sets these options to use LDAP.
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.sql.Identity')
- self.config_fixture.config(
- group='resource',
- driver='keystone.resource.backends.sql.Resource')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ self.config_fixture.config(group='identity', driver='sql')
+ self.config_fixture.config(group='resource', driver='sql')
+ self.config_fixture.config(group='assignment', driver='sql')
def _setup_initial_users(self):
# Create some identity entities BEFORE we switch to multi-backend, so
@@ -2453,11 +2602,6 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
- def reload_backends(self, domain_id):
- # Just reload the driver for this domain - which will pickup
- # any updated cfg
- self.identity_api.domain_configs.reload_domain_driver(domain_id)
-
def get_config(self, domain_id):
# Get the config for this domain, will return CONF
# if no specific config defined for this domain
@@ -2619,7 +2763,8 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
'domain_id': domain['id'],
'description': uuid.uuid4().hex,
'parent_id': None,
- 'enabled': True}
+ 'enabled': True,
+ 'is_domain': False}
self.resource_api.create_domain(domain['id'], domain)
self.resource_api.create_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
@@ -2653,6 +2798,11 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
self.skipTest("Doesn't apply since LDAP configuration is ignored for "
"SQL assignment backend.")
+ def test_list_role_assignments_filtered_by_role(self):
+ # Domain roles are supported by the SQL Assignment backend
+ base = super(BaseLDAPIdentity, self)
+ base.test_list_role_assignments_filtered_by_role()
+
class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
"""Class to test the use of domain configs stored in the database.
@@ -2662,6 +2812,18 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
database.
"""
+
+ def assert_backends(self):
+ _assert_backends(self,
+ assignment='sql',
+ identity={
+ None: 'sql',
+ self.domains['domain_default']['id']: 'ldap',
+ self.domains['domain1']['id']: 'ldap',
+ self.domains['domain2']['id']: 'ldap',
+ },
+ resource='sql')
+
def enable_multi_domain(self):
# The values below are the same as in the domain_configs_multi_ldap
# directory of test config_files.
@@ -2670,14 +2832,14 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
domain1_config = {
'ldap': {'url': 'fake://memory1',
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
domain2_config = {
'ldap': {'url': 'fake://memory',
@@ -2686,7 +2848,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
'suffix': 'cn=myroot,cn=com',
'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
self.domain_config_api.create_config(CONF.identity.default_domain_id,
@@ -2725,6 +2887,48 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
CONF.identity.default_domain_id))
self.assertEqual(CONF.ldap.url, default_config.ldap.url)
+ def test_reloading_domain_config(self):
+ """Ensure domain drivers are reloaded on a config modification."""
+
+ domain_cfgs = self.identity_api.domain_configs
+
+ # Create a new config for the default domain, hence overwriting the
+ # current settings.
+ new_config = {
+ 'ldap': {'url': uuid.uuid4().hex},
+ 'identity': {'driver': 'ldap'}}
+ self.domain_config_api.create_config(
+ CONF.identity.default_domain_id, new_config)
+ default_config = (
+ domain_cfgs.get_domain_conf(CONF.identity.default_domain_id))
+ self.assertEqual(new_config['ldap']['url'], default_config.ldap.url)
+
+ # Ensure updating is also honored
+ updated_config = {'url': uuid.uuid4().hex}
+ self.domain_config_api.update_config(
+ CONF.identity.default_domain_id, updated_config,
+ group='ldap', option='url')
+ default_config = (
+ domain_cfgs.get_domain_conf(CONF.identity.default_domain_id))
+ self.assertEqual(updated_config['url'], default_config.ldap.url)
+
+ # ...and finally ensure delete causes the driver to get the standard
+ # config again.
+ self.domain_config_api.delete_config(CONF.identity.default_domain_id)
+ default_config = (
+ domain_cfgs.get_domain_conf(CONF.identity.default_domain_id))
+ self.assertEqual(CONF.ldap.url, default_config.ldap.url)
+
+ def test_setting_sql_driver_raises_exception(self):
+ """Ensure setting of domain specific sql driver is prevented."""
+
+ new_config = {'identity': {'driver': 'sql'}}
+ self.domain_config_api.create_config(
+ CONF.identity.default_domain_id, new_config)
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.identity_api.domain_configs.get_domain_conf,
+ CONF.identity.default_domain_id)
+
class DomainSpecificLDAPandSQLIdentity(
BaseLDAPIdentity, tests.SQLDriverOverrides, tests.TestCase,
@@ -2740,11 +2944,11 @@ class DomainSpecificLDAPandSQLIdentity(
"""
def setUp(self):
- self.useFixture(database.Database())
+ sqldb = self.useFixture(database.Database())
super(DomainSpecificLDAPandSQLIdentity, self).setUp()
- self.initial_setup()
+ self.initial_setup(sqldb)
- def initial_setup(self):
+ def initial_setup(self, sqldb):
# We aren't setting up any initial data ahead of switching to
# domain-specific operation, so make the switch straight away.
self.config_fixture.config(
@@ -2755,37 +2959,33 @@ class DomainSpecificLDAPandSQLIdentity(
backward_compatible_ids=False)
self.load_backends()
-
- self.engine = sql.get_engine()
- self.addCleanup(sql.cleanup)
-
- sql.ModelBase.metadata.create_all(bind=self.engine)
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+ sqldb.recreate()
self.domain_count = 2
self.domain_specific_count = 2
self.setup_initial_domains()
self.users = {}
- self.clear_database()
+ self.ldapdb.clear()
self.load_fixtures(default_fixtures)
self.create_users_across_domains()
+ _assert_backends(
+ self,
+ assignment='sql',
+ identity={
+ None: 'ldap',
+ 'default': 'ldap',
+ self.domains['domain1']['id']: 'sql',
+ },
+ resource='sql')
+
def config_overrides(self):
super(DomainSpecificLDAPandSQLIdentity, self).config_overrides()
# Make sure resource & assignment are actually SQL drivers,
# BaseLDAPIdentity causes this option to use LDAP.
- self.config_fixture.config(
- group='resource',
- driver='keystone.resource.backends.sql.Resource')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
-
- def reload_backends(self, domain_id):
- # Just reload the driver for this domain - which will pickup
- # any updated cfg
- self.identity_api.domain_configs.reload_domain_driver(domain_id)
+ self.config_fixture.config(group='resource', driver='sql')
+ self.config_fixture.config(group='assignment', driver='sql')
def get_config(self, domain_id):
# Get the config for this domain, will return CONF
@@ -2889,6 +3089,11 @@ class DomainSpecificLDAPandSQLIdentity(
self.skipTest("Doesn't apply since LDAP configuration is ignored for "
"SQL assignment backend.")
+ def test_list_role_assignments_filtered_by_role(self):
+ # Domain roles are supported by the SQL Assignment backend
+ base = super(BaseLDAPIdentity, self)
+ base.test_list_role_assignments_filtered_by_role()
+
class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
"""Class to test simplest use of domain-specific SQL driver.
@@ -2902,7 +3107,7 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
- A separate SQL backend for domain1
"""
- def initial_setup(self):
+ def initial_setup(self, sqldb):
# We aren't setting up any initial data ahead of switching to
# domain-specific operation, so make the switch straight away.
self.config_fixture.config(
@@ -2916,12 +3121,7 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
backward_compatible_ids=True)
self.load_backends()
-
- self.engine = sql.get_engine()
- self.addCleanup(sql.cleanup)
-
- sql.ModelBase.metadata.create_all(bind=self.engine)
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
+ sqldb.recreate()
self.domain_count = 2
self.domain_specific_count = 1
@@ -2931,17 +3131,16 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
self.load_fixtures(default_fixtures)
self.create_users_across_domains()
+ _assert_backends(self,
+ assignment='sql',
+ identity='ldap',
+ resource='sql')
+
def config_overrides(self):
super(DomainSpecificSQLIdentity, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
- self.config_fixture.config(
- group='resource',
- driver='keystone.resource.backends.sql.Resource')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='resource', driver='sql')
+ self.config_fixture.config(group='assignment', driver='sql')
def get_config(self, domain_id):
if domain_id == CONF.identity.default_domain_id:
@@ -2949,36 +3148,20 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
else:
return self.identity_api.domain_configs.get_domain_conf(domain_id)
- def reload_backends(self, domain_id):
- if domain_id == CONF.identity.default_domain_id:
- self.load_backends()
- else:
- # Just reload the driver for this domain - which will pickup
- # any updated cfg
- self.identity_api.domain_configs.reload_domain_driver(domain_id)
-
def test_default_sql_plus_sql_specific_driver_fails(self):
# First confirm that if ldap is default driver, domain1 can be
# loaded as sql
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='assignment', driver='sql')
self.load_backends()
# Make any identity call to initiate the lazy loading of configs
self.identity_api.list_users(
domain_scope=CONF.identity.default_domain_id)
self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
- # Now re-initialize, but with sql as the default identity driver
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.sql.Identity')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ # Now re-initialize, but with sql as the identity driver
+ self.config_fixture.config(group='identity', driver='sql')
+ self.config_fixture.config(group='assignment', driver='sql')
self.load_backends()
# Make any identity call to initiate the lazy loading of configs, which
# should fail since we would now have two sql drivers.
@@ -2987,12 +3170,8 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
domain_scope=CONF.identity.default_domain_id)
def test_multiple_sql_specific_drivers_fails(self):
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.backends.sql.Assignment')
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='assignment', driver='sql')
self.load_backends()
# Ensure default, domain1 and domain2 exist
self.domain_count = 3
@@ -3019,31 +3198,30 @@ class LdapFilterTests(test_backend.FilterTests, tests.TestCase):
def setUp(self):
super(LdapFilterTests, self).setUp()
- self.useFixture(database.Database())
- self.clear_database()
+ sqldb = self.useFixture(database.Database())
+ self.useFixture(ldapdb.LDAPDatabase())
- common_ldap.register_handler('fake://', fakeldap.FakeLdap)
self.load_backends()
self.load_fixtures(default_fixtures)
-
- self.engine = sql.get_engine()
- self.addCleanup(sql.cleanup)
- sql.ModelBase.metadata.create_all(bind=self.engine)
-
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
- self.addCleanup(common_ldap_core._HANDLERS.clear)
+ sqldb.recreate()
+ _assert_backends(self, assignment='ldap', identity='ldap')
def config_overrides(self):
super(LdapFilterTests, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
+ self.config_fixture.config(group='identity', driver='ldap')
def config_files(self):
config_files = super(LdapFilterTests, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
return config_files
- def clear_database(self):
- for shelf in fakeldap.FakeShelves:
- fakeldap.FakeShelves[shelf].clear()
+ def test_list_users_in_group_filtered(self):
+ # The LDAP identity driver currently does not support filtering on the
+ # listing users for a given group, so will fail this test.
+ try:
+ super(LdapFilterTests, self).test_list_users_in_group_filtered()
+ except matchers.MismatchError:
+ return
+ # We shouldn't get here...if we do, it means someone has implemented
+ # filtering, so we can remove this test override.
+ self.assertTrue(False)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
index eee03b8b..66827d7e 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
@@ -210,9 +210,7 @@ class LdapPoolCommonTestMixin(object):
class LdapIdentitySqlAssignment(LdapPoolCommonTestMixin,
test_backend_ldap.LdapIdentitySqlAssignment,
tests.TestCase):
- '''Executes existing base class 150+ tests with pooled LDAP handler to make
- sure it works without any error.
- '''
+ """Executes tests in existing base class with pooled LDAP handler."""
def setUp(self):
self.useFixture(mockpatch.PatchObject(
ldap_core.PooledLDAPHandler, 'Connector', fakeldap.FakeLdapPool))
diff --git a/keystone-moon/keystone/tests/unit/test_backend_rules.py b/keystone-moon/keystone/tests/unit/test_backend_rules.py
index c9c4f151..bc0dc13d 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_rules.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_rules.py
@@ -25,9 +25,7 @@ class RulesPolicy(tests.TestCase, test_backend.PolicyTests):
def config_overrides(self):
super(RulesPolicy, self).config_overrides()
- self.config_fixture.config(
- group='policy',
- driver='keystone.policy.backends.rules.Policy')
+ self.config_fixture.config(group='policy', driver='rules')
def test_create(self):
self.assertRaises(exception.NotImplemented,
diff --git a/keystone-moon/keystone/tests/unit/test_backend_sql.py b/keystone-moon/keystone/tests/unit/test_backend_sql.py
index a7c63bf6..bf50ac21 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_sql.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_sql.py
@@ -20,6 +20,7 @@ import mock
from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_db import options
+from six.moves import range
import sqlalchemy
from sqlalchemy import exc
from testtools import matchers
@@ -28,7 +29,6 @@ from keystone.common import driver_hints
from keystone.common import sql
from keystone import exception
from keystone.identity.backends import sql as identity_sql
-from keystone.openstack.common import versionutils
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
@@ -67,18 +67,67 @@ class SqlModels(SqlTests):
s = sqlalchemy.select([table])
return s
- def assertExpectedSchema(self, table, cols):
+ def assertExpectedSchema(self, table, expected_schema):
+ """Assert that a table's schema is what we expect.
+
+ :param string table: the name of the table to inspect
+ :param tuple expected_schema: a tuple of tuples containing the
+ expected schema
+ :raises AssertionError: when the database schema doesn't match the
+ expected schema
+
+ The expected_schema format is simply::
+
+ (
+ ('column name', sql type, qualifying detail),
+ ...
+ )
+
+ The qualifying detail varies based on the type of the column::
+
+ - sql.Boolean columns must indicate the column's default value or
+ None if there is no default
+ - Columns with a length, like sql.String, must indicate the
+ column's length
+ - All other column types should use None
+
+ Example::
+
+ cols = (('id', sql.String, 64),
+ ('enabled', sql.Boolean, True),
+ ('extra', sql.JsonBlob, None))
+ self.assertExpectedSchema('table_name', cols)
+
+ """
table = self.select_table(table)
- for col, type_, length in cols:
- self.assertIsInstance(table.c[col].type, type_)
- if length:
- self.assertEqual(length, table.c[col].type.length)
+
+ actual_schema = []
+ for column in table.c:
+ if isinstance(column.type, sql.Boolean):
+ default = None
+ if column._proxies[0].default:
+ default = column._proxies[0].default.arg
+ actual_schema.append((column.name, type(column.type), default))
+ elif (hasattr(column.type, 'length') and
+ not isinstance(column.type, sql.Enum)):
+ # NOTE(dstanek): Even though sql.Enum columns have a length
+ # set we don't want to catch them here. Maybe in the future
+ # we'll check to see that they contain a list of the correct
+ # possible values.
+ actual_schema.append((column.name,
+ type(column.type),
+ column.type.length))
+ else:
+ actual_schema.append((column.name, type(column.type), None))
+
+ self.assertItemsEqual(expected_schema, actual_schema)
def test_user_model(self):
cols = (('id', sql.String, 64),
('name', sql.String, 255),
('password', sql.String, 128),
('domain_id', sql.String, 64),
+ ('default_project_id', sql.String, 64),
('enabled', sql.Boolean, None),
('extra', sql.JsonBlob, None))
self.assertExpectedSchema('user', cols)
@@ -94,7 +143,8 @@ class SqlModels(SqlTests):
def test_domain_model(self):
cols = (('id', sql.String, 64),
('name', sql.String, 64),
- ('enabled', sql.Boolean, None))
+ ('enabled', sql.Boolean, True),
+ ('extra', sql.JsonBlob, None))
self.assertExpectedSchema('domain', cols)
def test_project_model(self):
@@ -104,7 +154,8 @@ class SqlModels(SqlTests):
('domain_id', sql.String, 64),
('enabled', sql.Boolean, None),
('extra', sql.JsonBlob, None),
- ('parent_id', sql.String, 64))
+ ('parent_id', sql.String, 64),
+ ('is_domain', sql.Boolean, False))
self.assertExpectedSchema('project', cols)
def test_role_assignment_model(self):
@@ -692,6 +743,9 @@ class SqlTokenCacheInvalidation(SqlTests, test_backend.TokenCacheInvalidation):
class SqlFilterTests(SqlTests, test_backend.FilterTests):
+ def _get_user_name_field_size(self):
+ return identity_sql.User.name.type.length
+
def clean_up_entities(self):
"""Clean up entity test data from Filter Test Cases."""
@@ -761,21 +815,6 @@ class SqlFilterTests(SqlTests, test_backend.FilterTests):
groups = self.identity_api.list_groups()
self.assertTrue(len(groups) > 0)
- def test_groups_for_user_filtered(self):
- # The SQL identity driver currently does not support filtering on the
- # listing groups for a given user, so will fail this test. This is
- # raised as bug #1412447.
- try:
- super(SqlFilterTests, self).test_groups_for_user_filtered()
- except matchers.MismatchError:
- return
- # We shouldn't get here...if we do, it means someone has fixed the
- # above defect, so we can remove this test override. As an aside, it
- # would be nice to have used self.assertRaises() around the call above
- # to achieve the logic here...but that does not seem to work when
- # wrapping another assert (it won't seem to catch the error).
- self.assertTrue(False)
-
class SqlLimitTests(SqlTests, test_backend.LimitTests):
def setUp(self):
@@ -881,68 +920,3 @@ class SqlCredential(SqlTests):
credentials = self.credential_api.list_credentials_for_user(
self.user_foo['id'])
self._validateCredentialList(credentials, self.user_credentials)
-
-
-class DeprecatedDecorators(SqlTests):
-
- def test_assignment_to_role_api(self):
- """Test that calling one of the methods does call LOG.deprecated.
-
- This method is really generic to the type of backend, but we need
- one to execute the test, so the SQL backend is as good as any.
-
- """
-
- # Rather than try and check that a log message is issued, we
- # enable fatal_deprecations so that we can check for the
- # raising of the exception.
-
- # First try to create a role without enabling fatal deprecations,
- # which should work due to the cross manager deprecated calls.
- role_ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.assignment_api.create_role(role_ref['id'], role_ref)
- self.role_api.get_role(role_ref['id'])
-
- # Now enable fatal exceptions - creating a role by calling the
- # old manager should now fail.
- self.config_fixture.config(fatal_deprecations=True)
- role_ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.assertRaises(versionutils.DeprecatedConfig,
- self.assignment_api.create_role,
- role_ref['id'], role_ref)
-
- def test_assignment_to_resource_api(self):
- """Test that calling one of the methods does call LOG.deprecated.
-
- This method is really generic to the type of backend, but we need
- one to execute the test, so the SQL backend is as good as any.
-
- """
-
- # Rather than try and check that a log message is issued, we
- # enable fatal_deprecations so that we can check for the
- # raising of the exception.
-
- # First try to create a project without enabling fatal deprecations,
- # which should work due to the cross manager deprecated calls.
- project_ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.resource_api.get_project(project_ref['id'])
-
- # Now enable fatal exceptions - creating a project by calling the
- # old manager should now fail.
- self.config_fixture.config(fatal_deprecations=True)
- project_ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(versionutils.DeprecatedConfig,
- self.assignment_api.create_project,
- project_ref['id'], project_ref)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_templated.py b/keystone-moon/keystone/tests/unit/test_backend_templated.py
index a1c15fb1..82a8bed8 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_templated.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_templated.py
@@ -12,18 +12,20 @@
# License for the specific language governing permissions and limitations
# under the License.
-import os
import uuid
+import mock
+from six.moves import zip
+
+from keystone import catalog
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit import test_backend
-DEFAULT_CATALOG_TEMPLATES = os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'default_catalog.templates'))
+BROKEN_WRITE_FUNCTIONALITY_MSG = ("Templated backend doesn't correctly "
+ "implement write operations")
class TestTemplatedCatalog(tests.TestCase, test_backend.CatalogTests):
@@ -55,8 +57,10 @@ class TestTemplatedCatalog(tests.TestCase, test_backend.CatalogTests):
def config_overrides(self):
super(TestTemplatedCatalog, self).config_overrides()
- self.config_fixture.config(group='catalog',
- template_file=DEFAULT_CATALOG_TEMPLATES)
+ self.config_fixture.config(
+ group='catalog',
+ driver='templated',
+ template_file=tests.dirs.tests('default_catalog.templates'))
def test_get_catalog(self):
catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
@@ -120,8 +124,116 @@ class TestTemplatedCatalog(tests.TestCase, test_backend.CatalogTests):
'id': '1'}]
self.assert_catalogs_equal(exp_catalog, catalog_ref)
+ def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
+ user_id = uuid.uuid4().hex
+ # If the URL has no 'tenant_id' to substitute, we will skip the
+ # endpoint which contains this kind of URL.
+ catalog_ref = self.catalog_api.get_v3_catalog(user_id, tenant_id=None)
+ exp_catalog = [
+ {'endpoints': [],
+ 'type': 'compute',
+ 'name': "'Compute Service'",
+ 'id': '2'},
+ {'endpoints': [
+ {'interface': 'admin',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:35357/v2.0'},
+ {'interface': 'public',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:5000/v2.0'},
+ {'interface': 'internal',
+ 'region': 'RegionOne',
+ 'url': 'http://localhost:35357/v2.0'}],
+ 'type': 'identity',
+ 'name': "'Identity Service'",
+ 'id': '1'}]
+ self.assert_catalogs_equal(exp_catalog, catalog_ref)
+
def test_list_regions_filtered_by_parent_region_id(self):
self.skipTest('Templated backend does not support hints')
def test_service_filtering(self):
self.skipTest("Templated backend doesn't support filtering")
+
+ # NOTE(dstanek): the following methods have been overridden
+ # from test_backend.CatalogTests
+
+ def test_region_crud(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_cache_layer_region_crud(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_region(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_create_region_with_duplicate_id(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_delete_region_404(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_create_region_invalid_parent_region_404(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_avoid_creating_circular_references_in_regions_update(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ @mock.patch.object(catalog.Driver,
+ "_ensure_no_circle_in_hierarchical_regions")
+ def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_service_crud(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_cache_layer_service_crud(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_service(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_delete_service_with_endpoint(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_cache_layer_delete_service_with_endpoint(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_delete_service_404(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_update_endpoint_nonexistent_service(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_create_endpoint_nonexistent_region(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_update_endpoint_nonexistent_region(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_get_endpoint_404(self):
+ self.skipTest("Templated backend doesn't use IDs for endpoints.")
+
+ def test_delete_endpoint_404(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_create_endpoint(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_update_endpoint(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
+
+ def test_list_endpoints(self):
+ # NOTE(dstanek): a future commit will fix this functionality and
+ # this test
+ expected_ids = set()
+ endpoints = self.catalog_api.list_endpoints()
+ self.assertEqual(expected_ids, set(e['id'] for e in endpoints))
+
+ @tests.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_endpoint(self):
+ self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
diff --git a/keystone-moon/keystone/tests/unit/test_cache.py b/keystone-moon/keystone/tests/unit/test_cache.py
index 5a778a07..c60df877 100644
--- a/keystone-moon/keystone/tests/unit/test_cache.py
+++ b/keystone-moon/keystone/tests/unit/test_cache.py
@@ -47,10 +47,12 @@ def _copy_value(value):
# backend unless you are running tests or expecting odd/strange results.
class CacheIsolatingProxy(proxy.ProxyBackend):
"""Proxy that forces a memory copy of stored values.
- The default in-memory cache-region does not perform a copy on values it
- is meant to cache. Therefore if the value is modified after set or after
- get, the cached value also is modified. This proxy does a copy as the last
+
+ The default in-memory cache-region does not perform a copy on values it is
+ meant to cache. Therefore if the value is modified after set or after get,
+ the cached value also is modified. This proxy does a copy as the last
thing before storing data.
+
"""
def get(self, key):
return _copy_value(self.proxied.get(key))
diff --git a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py b/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
index a56bf754..369570d6 100644
--- a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
+++ b/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
@@ -20,6 +20,7 @@ import uuid
from dogpile.cache import api
from dogpile.cache import region as dp_region
import six
+from six.moves import range
from keystone.common.cache.backends import mongo
from keystone import exception
@@ -139,13 +140,13 @@ class MockCollection(object):
if self._apply_filter(document, spec))
def _apply_filter(self, document, query):
- for key, search in six.iteritems(query):
+ for key, search in query.items():
doc_val = document.get(key)
if isinstance(search, dict):
op_dict = {'$in': lambda dv, sv: dv in sv}
is_match = all(
op_str in op_dict and op_dict[op_str](doc_val, search_val)
- for op_str, search_val in six.iteritems(search)
+ for op_str, search_val in search.items()
)
else:
is_match = doc_val == search
@@ -160,7 +161,7 @@ class MockCollection(object):
return new
if isinstance(obj, dict):
new = container()
- for key, value in obj.items():
+ for key, value in list(obj.items()):
new[key] = self._copy_doc(value, container)
return new
else:
@@ -198,7 +199,7 @@ class MockCollection(object):
existing_doc = self._documents[self._insert(document)]
def _internalize_dict(self, d):
- return {k: copy.deepcopy(v) for k, v in six.iteritems(d)}
+ return {k: copy.deepcopy(v) for k, v in d.items()}
def remove(self, spec_or_id=None, search_filter=None):
"""Remove objects matching spec_or_id from the collection."""
diff --git a/keystone-moon/keystone/tests/unit/test_catalog.py b/keystone-moon/keystone/tests/unit/test_catalog.py
index 9dda5d83..4e7f4037 100644
--- a/keystone-moon/keystone/tests/unit/test_catalog.py
+++ b/keystone-moon/keystone/tests/unit/test_catalog.py
@@ -14,8 +14,6 @@
import uuid
-import six
-
from keystone import catalog
from keystone.tests import unit as tests
from keystone.tests.unit.ksfixtures import database
@@ -47,9 +45,7 @@ class V2CatalogTestCase(rest.RestfulTestCase):
def config_overrides(self):
super(V2CatalogTestCase, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='keystone.catalog.backends.sql.Catalog')
+ self.config_fixture.config(group='catalog', driver='sql')
def new_ref(self):
"""Populates a ref with attributes common to all API entities."""
@@ -95,7 +91,7 @@ class V2CatalogTestCase(rest.RestfulTestCase):
req_body, response = self._endpoint_create()
self.assertIn('endpoint', response.result)
self.assertIn('id', response.result['endpoint'])
- for field, value in six.iteritems(req_body['endpoint']):
+ for field, value in req_body['endpoint'].items():
self.assertEqual(response.result['endpoint'][field], value)
def test_endpoint_create_with_null_adminurl(self):
@@ -130,6 +126,92 @@ class V2CatalogTestCase(rest.RestfulTestCase):
def test_endpoint_create_with_empty_service_id(self):
self._endpoint_create(expected_status=400, service_id='')
+ def test_endpoint_create_with_valid_url(self):
+ """Create endpoint with valid URL should be tested, too."""
+        # listing one valid URL is enough; no need to list too many
+ valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
+
+ # baseline tests that all valid URLs works
+ self._endpoint_create(expected_status=200,
+ publicurl=valid_url,
+ internalurl=valid_url,
+ adminurl=valid_url)
+
+ def test_endpoint_create_with_invalid_url(self):
+ """Test the invalid cases: substitutions is not exactly right."""
+ invalid_urls = [
+ # using a substitution that is not whitelisted - KeyError
+ 'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
+
+ # invalid formatting - ValueError
+ 'http://127.0.0.1:8774/v1.1/$(tenant_id)',
+ 'http://127.0.0.1:8774/v1.1/$(tenant_id)t',
+ 'http://127.0.0.1:8774/v1.1/$(tenant_id',
+
+ # invalid type specifier - TypeError
+ # admin_url is a string not an int
+ 'http://127.0.0.1:8774/v1.1/$(admin_url)d',
+ ]
+
+        # listing one valid URL is enough; no need to list too many
+ valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
+
+ # Case one: publicurl, internalurl and adminurl are
+ # all invalid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=invalid_url,
+ internalurl=invalid_url,
+ adminurl=invalid_url)
+
+ # Case two: publicurl, internalurl are invalid
+ # and adminurl is valid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=invalid_url,
+ internalurl=invalid_url,
+ adminurl=valid_url)
+
+ # Case three: publicurl, adminurl are invalid
+ # and internalurl is valid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=invalid_url,
+ internalurl=valid_url,
+ adminurl=invalid_url)
+
+ # Case four: internalurl, adminurl are invalid
+ # and publicurl is valid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=valid_url,
+ internalurl=invalid_url,
+ adminurl=invalid_url)
+
+ # Case five: publicurl is invalid, internalurl
+ # and adminurl are valid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=invalid_url,
+ internalurl=valid_url,
+ adminurl=valid_url)
+
+ # Case six: internalurl is invalid, publicurl
+ # and adminurl are valid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=valid_url,
+ internalurl=invalid_url,
+ adminurl=valid_url)
+
+ # Case seven: adminurl is invalid, publicurl
+ # and internalurl are valid
+ for invalid_url in invalid_urls:
+ self._endpoint_create(expected_status=400,
+ publicurl=valid_url,
+ internalurl=valid_url,
+ adminurl=invalid_url)
+
class TestV2CatalogAPISQL(tests.TestCase):
@@ -147,9 +229,7 @@ class TestV2CatalogAPISQL(tests.TestCase):
def config_overrides(self):
super(TestV2CatalogAPISQL, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='keystone.catalog.backends.sql.Catalog')
+ self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
diff --git a/keystone-moon/keystone/tests/unit/test_cert_setup.py b/keystone-moon/keystone/tests/unit/test_cert_setup.py
index d1e9ccfd..3d300810 100644
--- a/keystone-moon/keystone/tests/unit/test_cert_setup.py
+++ b/keystone-moon/keystone/tests/unit/test_cert_setup.py
@@ -68,9 +68,7 @@ class CertSetupTestCase(rest.RestfulTestCase):
ca_certs=ca_certs,
certfile=os.path.join(CERTDIR, 'keystone.pem'),
keyfile=os.path.join(KEYDIR, 'keystonekey.pem'))
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.pkiz.Provider')
+ self.config_fixture.config(group='token', provider='pkiz')
def test_can_handle_missing_certs(self):
controller = token.controllers.Auth()
diff --git a/keystone-moon/keystone/tests/unit/test_cli.py b/keystone-moon/keystone/tests/unit/test_cli.py
index 20aa03e6..3f37612e 100644
--- a/keystone-moon/keystone/tests/unit/test_cli.py
+++ b/keystone-moon/keystone/tests/unit/test_cli.py
@@ -17,14 +17,16 @@ import uuid
import mock
from oslo_config import cfg
+from six.moves import range
-from keystone import cli
+from keystone.cmd import cli
from keystone.common import dependency
from keystone.i18n import _
from keystone import resource
from keystone.tests import unit as tests
from keystone.tests.unit.ksfixtures import database
+
CONF = cfg.CONF
@@ -103,14 +105,14 @@ class CliDomainConfigAllTestCase(tests.SQLDriverOverrides, tests.TestCase):
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
domain1_config = {
'ldap': {'url': 'fake://memory1',
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
domain2_config = {
'ldap': {'url': 'fake://memory',
@@ -119,7 +121,7 @@ class CliDomainConfigAllTestCase(tests.SQLDriverOverrides, tests.TestCase):
'suffix': 'cn=myroot,cn=com',
'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
# Clear backend dependencies, since cli loads these manually
@@ -151,7 +153,7 @@ class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
# Clear backend dependencies, since cli loads these manually
@@ -172,7 +174,7 @@ class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
# Create a config for the default domain
default_config = {
'ldap': {'url': uuid.uuid4().hex},
- 'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
+ 'identity': {'driver': 'ldap'}
}
self.domain_config_api.create_config(
CONF.identity.default_domain_id, default_config)
diff --git a/keystone-moon/keystone/tests/unit/test_config.py b/keystone-moon/keystone/tests/unit/test_config.py
index 15cfac81..431f9965 100644
--- a/keystone-moon/keystone/tests/unit/test_config.py
+++ b/keystone-moon/keystone/tests/unit/test_config.py
@@ -46,10 +46,8 @@ class ConfigTestCase(tests.TestCase):
config.find_paste_config())
def test_config_default(self):
- self.assertEqual('keystone.auth.plugins.password.Password',
- CONF.auth.password)
- self.assertEqual('keystone.auth.plugins.token.Token',
- CONF.auth.token)
+ self.assertIs(None, CONF.auth.password)
+ self.assertIs(None, CONF.auth.token)
class DeprecatedTestCase(tests.TestCase):
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_ec2.py b/keystone-moon/keystone/tests/unit/test_contrib_ec2.py
new file mode 100644
index 00000000..c6717dc5
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_contrib_ec2.py
@@ -0,0 +1,208 @@
+# Copyright 2015 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystoneclient.contrib.ec2 import utils as ec2_utils
+
+from keystone.contrib.ec2 import controllers
+from keystone import exception
+from keystone.tests import unit as tests
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+
+
+class TestCredentialEc2(tests.TestCase):
+ # TODO(davechen): more testcases for ec2 credential are expected here and
+ # the file name would be renamed to "test_credential" to correspond with
+ # "test_v3_credential.py".
+ def setUp(self):
+ super(TestCredentialEc2, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.user_id = self.user_foo['id']
+ self.project_id = self.tenant_bar['id']
+ self.blob = {'access': uuid.uuid4().hex,
+ 'secret': uuid.uuid4().hex}
+ self.controller = controllers.Ec2Controller()
+ self.creds_ref = {'user_id': self.user_id,
+ 'tenant_id': self.project_id,
+ 'access': self.blob['access'],
+ 'secret': self.blob['secret'],
+ 'trust_id': None}
+
+ def test_signature_validate_no_host_port(self):
+ """Test signature validation with the access/secret provided."""
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ self.assertTrue(self.controller.check_signature(self.creds_ref,
+ sig_ref))
+
+ def test_signature_validate_with_host_port(self):
+ """Test signature validation when host is bound with port.
+
+        Host is bound with a port. Generally the port here is not the
+        standard port for the protocol (like '80' for HTTP or 443 for
+        HTTPS), so the port is not omitted by the client library.
+ """
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'foo:8181',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo:8181',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ self.assertTrue(self.controller.check_signature(self.creds_ref,
+ sig_ref))
+
+ def test_signature_validate_with_missed_host_port(self):
+ """Test signature validation when host is bound with well-known port.
+
+        Host is bound with a port, but the port is a well-known port like
+        '80' for HTTP or 443 for HTTPS; sometimes the client library omits
+        the port but then makes the request with the port.
+ see (How to create the string to sign): 'http://docs.aws.amazon.com/
+ general/latest/gr/signature-version-2.html'.
+
+        Since "credentials['host']" is not set by the client library but is
+        taken from "req.host", this causes the difference.
+ """
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ # Omit the port to generate the signature.
+ cnt_req = {'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(cnt_req)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo:8080',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ # Check the signature again after omitting the port.
+ self.assertTrue(self.controller.check_signature(self.creds_ref,
+ sig_ref))
+
+ def test_signature_validate_no_signature(self):
+ """Signature is not presented in signature reference data."""
+ access = self.blob['access']
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+
+ sig_ref = {'access': access,
+ 'signature': None,
+ 'host': 'foo:8080',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ creds_ref = {'user_id': self.user_id,
+ 'tenant_id': self.project_id,
+ 'access': self.blob['access'],
+ 'secret': self.blob['secret'],
+ 'trust_id': None
+ }
+
+ # Now validate the signature based on the dummy request
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, sig_ref)
+
+ def test_signature_validate_invalid_signature(self):
+ """Signature is not signed on the correct data."""
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'bar',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo:8080',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ creds_ref = {'user_id': self.user_id,
+ 'tenant_id': self.project_id,
+ 'access': self.blob['access'],
+ 'secret': self.blob['secret'],
+ 'trust_id': None
+ }
+
+ # Now validate the signature based on the dummy request
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, sig_ref)
+
+ def test_check_non_admin_user(self):
+ """Checking if user is admin causes uncaught error.
+
+ When checking if a user is an admin, keystone.exception.Unauthorized
+ is raised but not caught if the user is not an admin.
+ """
+ # make a non-admin user
+ context = {'is_admin': False, 'token_id': uuid.uuid4().hex}
+
+ # check if user is admin
+ # no exceptions should be raised
+ self.controller._is_admin(context)
diff --git a/keystone-moon/keystone/tests/unit/test_exception.py b/keystone-moon/keystone/tests/unit/test_exception.py
index f91fa2a7..bf541dfd 100644
--- a/keystone-moon/keystone/tests/unit/test_exception.py
+++ b/keystone-moon/keystone/tests/unit/test_exception.py
@@ -87,7 +87,10 @@ class ExceptionTestCase(tests.BaseTestCase):
e = exception.ValidationError(attribute='xx',
target='Long \xe2\x80\x93 Dash')
- self.assertIn(u'\u2013', six.text_type(e))
+ if six.PY2:
+ self.assertIn(u'\u2013', six.text_type(e))
+ else:
+ self.assertIn('Long \xe2\x80\x93 Dash', six.text_type(e))
def test_invalid_unicode_string(self):
# NOTE(jamielennox): This is a complete failure case so what is
@@ -95,7 +98,12 @@ class ExceptionTestCase(tests.BaseTestCase):
# as there is an error with a message
e = exception.ValidationError(attribute='xx',
target='\xe7a va')
- self.assertIn('%(attribute)', six.text_type(e))
+
+ if six.PY2:
+ self.assertIn('%(attribute)', six.text_type(e))
+ else:
+ # There's no UnicodeDecodeError on python 3.
+ self.assertIn('\xe7a va', six.text_type(e))
class UnexpectedExceptionTestCase(ExceptionTestCase):
diff --git a/keystone-moon/keystone/tests/unit/test_hacking_checks.py b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
index b9b047b3..962f5f8a 100644
--- a/keystone-moon/keystone/tests/unit/test_hacking_checks.py
+++ b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
@@ -14,13 +14,13 @@ import textwrap
import mock
import pep8
-import testtools
-from keystone.hacking import checks
+from keystone.tests.hacking import checks
+from keystone.tests import unit
from keystone.tests.unit.ksfixtures import hacking as hacking_fixtures
-class BaseStyleCheck(testtools.TestCase):
+class BaseStyleCheck(unit.BaseTestCase):
def setUp(self):
super(BaseStyleCheck, self).setUp()
@@ -122,16 +122,6 @@ class TestCheckForNonDebugLoggingIssues(BaseStyleCheck):
self.assertEqual(expected_errors or [], actual_errors)
-class TestCheckOsloNamespaceImports(BaseStyleCheck):
- def get_checker(self):
- return checks.check_oslo_namespace_imports
-
- def test(self):
- code = self.code_ex.oslo_namespace_imports['code']
- errors = self.code_ex.oslo_namespace_imports['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
class TestDictConstructorWithSequenceCopy(BaseStyleCheck):
def get_checker(self):
diff --git a/keystone-moon/keystone/tests/unit/test_kvs.py b/keystone-moon/keystone/tests/unit/test_kvs.py
index 4d80ea33..77e05e6d 100644
--- a/keystone-moon/keystone/tests/unit/test_kvs.py
+++ b/keystone-moon/keystone/tests/unit/test_kvs.py
@@ -28,6 +28,7 @@ from keystone.common.kvs import core
from keystone import exception
from keystone.tests import unit as tests
+
NO_VALUE = api.NO_VALUE
@@ -487,6 +488,8 @@ class KVSTest(tests.TestCase):
memcached_expire_time=memcache_expire_time,
some_other_arg=uuid.uuid4().hex,
no_expiry_keys=[self.key_bar])
+ kvs_driver = kvs._region.backend.driver
+
# Ensure the set_arguments are correct
self.assertDictEqual(
kvs._region.backend._get_set_arguments_driver_attr(),
@@ -498,8 +501,8 @@ class KVSTest(tests.TestCase):
self.assertDictEqual(
kvs._region.backend.driver.client.set_arguments_passed,
expected_set_args)
- self.assertEqual(expected_foo_keys,
- kvs._region.backend.driver.client.keys_values.keys())
+ observed_foo_keys = list(kvs_driver.client.keys_values.keys())
+ self.assertEqual(expected_foo_keys, observed_foo_keys)
self.assertEqual(
self.value_foo,
kvs._region.backend.driver.client.keys_values[self.key_foo][0])
@@ -510,8 +513,8 @@ class KVSTest(tests.TestCase):
self.assertDictEqual(
kvs._region.backend.driver.client.set_arguments_passed,
expected_no_expiry_args)
- self.assertEqual(expected_bar_keys,
- kvs._region.backend.driver.client.keys_values.keys())
+ observed_bar_keys = list(kvs_driver.client.keys_values.keys())
+ self.assertEqual(expected_bar_keys, observed_bar_keys)
self.assertEqual(
self.value_bar,
kvs._region.backend.driver.client.keys_values[self.key_bar][0])
@@ -522,8 +525,8 @@ class KVSTest(tests.TestCase):
self.assertDictEqual(
kvs._region.backend.driver.client.set_arguments_passed,
expected_set_args)
- self.assertEqual(expected_foo_keys,
- kvs._region.backend.driver.client.keys_values.keys())
+ observed_foo_keys = list(kvs_driver.client.keys_values.keys())
+ self.assertEqual(expected_foo_keys, observed_foo_keys)
self.assertEqual(
self.value_foo,
kvs._region.backend.driver.client.keys_values[self.key_foo][0])
@@ -534,8 +537,8 @@ class KVSTest(tests.TestCase):
self.assertDictEqual(
kvs._region.backend.driver.client.set_arguments_passed,
expected_no_expiry_args)
- self.assertEqual(expected_bar_keys,
- kvs._region.backend.driver.client.keys_values.keys())
+ observed_bar_keys = list(kvs_driver.client.keys_values.keys())
+ self.assertEqual(expected_bar_keys, observed_bar_keys)
self.assertEqual(
self.value_bar,
kvs._region.backend.driver.client.keys_values[self.key_bar][0])
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
index 5b449362..b9f56e8d 100644
--- a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
@@ -15,9 +15,9 @@
import subprocess
import uuid
-import ldap
import ldap.modlist
from oslo_config import cfg
+from six.moves import range
from keystone import exception
from keystone.identity.backends import ldap as identity_ldap
@@ -81,12 +81,6 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
config_files.append(tests.dirs.tests_conf('backend_liveldap.conf'))
return config_files
- def config_overrides(self):
- super(LiveLDAPIdentity, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
-
def test_build_tree(self):
"""Regression test for building the tree names
"""
@@ -95,9 +89,6 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
self.assertTrue(user_api)
self.assertEqual(user_api.tree_dn, CONF.ldap.user_tree_dn)
- def tearDown(self):
- tests.TestCase.tearDown(self)
-
def test_ldap_dereferencing(self):
alt_users_ldif = {'objectclass': ['top', 'organizationalUnit'],
'ou': 'alt_users'}
@@ -176,8 +167,10 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
negative_user['id'])
self.assertEqual(0, len(group_refs))
- self.config_fixture.config(group='ldap', group_filter='(dn=xx)')
- self.reload_backends(CONF.identity.default_domain_id)
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
+ driver.group.ldap_filter = '(dn=xx)'
+
group_refs = self.identity_api.list_groups_for_user(
positive_user['id'])
self.assertEqual(0, len(group_refs))
@@ -185,9 +178,8 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
negative_user['id'])
self.assertEqual(0, len(group_refs))
- self.config_fixture.config(group='ldap',
- group_filter='(objectclass=*)')
- self.reload_backends(CONF.identity.default_domain_id)
+ driver.group.ldap_filter = '(objectclass=*)'
+
group_refs = self.identity_api.list_groups_for_user(
positive_user['id'])
self.assertEqual(GROUP_COUNT, len(group_refs))
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
index 02fa8145..a8776e5b 100644
--- a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
@@ -30,10 +30,10 @@ CONF = cfg.CONF
class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin,
test_ldap_livetest.LiveLDAPIdentity):
- """Executes existing LDAP live test with pooled LDAP handler to make
- sure it works without any error.
+ """Executes existing LDAP live test with pooled LDAP handler.
Also executes common pool specific tests via Mixin class.
+
"""
def setUp(self):
@@ -48,12 +48,6 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin,
tests_conf('backend_pool_liveldap.conf'))
return config_files
- def config_overrides(self):
- super(LiveLDAPPoolIdentity, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
-
def test_assert_connector_used_not_fake_ldap_pool(self):
handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True)
self.assertNotEqual(type(handler.Connector),
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
index d79c2bad..e77bbc98 100644
--- a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
@@ -13,7 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-import ldap
import ldap.modlist
from oslo_config import cfg
@@ -44,12 +43,6 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
config_files.append(tests.dirs.tests_conf('backend_tls_liveldap.conf'))
return config_files
- def config_overrides(self):
- super(LiveTLSLDAPIdentity, self).config_overrides()
- self.config_fixture.config(
- group='identity',
- driver='keystone.identity.backends.ldap.Identity')
-
def test_tls_certfile_demand_option(self):
self.config_fixture.config(group='ldap',
use_tls=True,
diff --git a/keystone-moon/keystone/tests/unit/test_policy.py b/keystone-moon/keystone/tests/unit/test_policy.py
index 2c0c3995..30df0b2b 100644
--- a/keystone-moon/keystone/tests/unit/test_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_policy.py
@@ -14,6 +14,7 @@
# under the License.
import json
+import os
import mock
from oslo_policy import policy as common_policy
@@ -223,6 +224,48 @@ class PolicyJsonTestCase(tests.TestCase):
cloud_policy_keys = self._load_entries(
tests.dirs.etc('policy.v3cloudsample.json'))
- diffs = set(policy_keys).difference(set(cloud_policy_keys))
+ policy_extra_keys = ['admin_or_token_subject',
+ 'service_admin_or_token_subject',
+ 'token_subject', ]
+ expected_policy_keys = list(cloud_policy_keys) + policy_extra_keys
+ diffs = set(policy_keys).difference(set(expected_policy_keys))
self.assertThat(diffs, matchers.Equals(set()))
+
+ def test_all_targets_documented(self):
+ # All the targets in the sample policy file must be documented in
+ # doc/source/policy_mapping.rst.
+
+ policy_keys = self._load_entries(tests.dirs.etc('policy.json'))
+
+ # These keys are in the policy.json but aren't targets.
+ policy_rule_keys = [
+ 'admin_or_owner', 'admin_or_token_subject', 'admin_required',
+ 'default', 'owner', 'service_admin_or_token_subject',
+ 'service_or_admin', 'service_role', 'token_subject', ]
+
+ def read_doc_targets():
+ # Parse the doc/source/policy_mapping.rst file and return the
+ # targets.
+
+ doc_path = os.path.join(
+ tests.ROOTDIR, 'doc', 'source', 'policy_mapping.rst')
+ with open(doc_path) as doc_file:
+ for line in doc_file:
+ if line.startswith('Target'):
+ break
+ for line in doc_file:
+ # Skip === line
+ if line.startswith('==='):
+ break
+ for line in doc_file:
+ line = line.rstrip()
+ if not line or line.startswith(' '):
+ continue
+ if line.startswith('=='):
+ break
+ target, dummy, dummy = line.partition(' ')
+ yield six.text_type(target)
+
+ doc_targets = list(read_doc_targets())
+ self.assertItemsEqual(policy_keys, doc_targets + policy_rule_keys)
diff --git a/keystone-moon/keystone/tests/unit/test_revoke.py b/keystone-moon/keystone/tests/unit/test_revoke.py
index 727eff78..5394688c 100644
--- a/keystone-moon/keystone/tests/unit/test_revoke.py
+++ b/keystone-moon/keystone/tests/unit/test_revoke.py
@@ -16,8 +16,10 @@ import uuid
import mock
from oslo_utils import timeutils
+from six.moves import range
from testtools import matchers
+from keystone.common import utils
from keystone.contrib.revoke import model
from keystone import exception
from keystone.tests import unit as tests
@@ -112,6 +114,7 @@ def _matches(event, token_values):
class RevokeTests(object):
+
def test_list(self):
self.revoke_api.revoke_by_user(user_id=1)
self.assertEqual(1, len(self.revoke_api.list_events()))
@@ -140,8 +143,8 @@ class RevokeTests(object):
def test_expired_events_removed_validate_token_success(self, mock_utcnow):
def _sample_token_values():
token = _sample_blank_token()
- token['expires_at'] = timeutils.isotime(_future_time(),
- subsecond=True)
+ token['expires_at'] = utils.isotime(_future_time(),
+ subsecond=True)
return token
now = datetime.datetime.utcnow()
@@ -168,7 +171,7 @@ class RevokeTests(object):
def test_revoke_by_expiration_project_and_domain_fails(self):
user_id = _new_id()
- expires_at = timeutils.isotime(_future_time(), subsecond=True)
+ expires_at = utils.isotime(_future_time(), subsecond=True)
domain_id = _new_id()
project_id = _new_id()
self.assertThat(
@@ -181,24 +184,20 @@ class RevokeTests(object):
class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests):
def config_overrides(self):
super(SqlRevokeTests, self).config_overrides()
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.sql.Revoke')
+ self.config_fixture.config(group='revoke', driver='sql')
self.config_fixture.config(
group='token',
- provider='keystone.token.providers.pki.Provider',
+ provider='pki',
revoke_by_id=False)
class KvsRevokeTests(tests.TestCase, RevokeTests):
def config_overrides(self):
super(KvsRevokeTests, self).config_overrides()
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
- provider='keystone.token.providers.pki.Provider',
+ provider='pki',
revoke_by_id=False)
def setUp(self):
diff --git a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
index edfb91d7..87b3d48d 100644
--- a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
+++ b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
@@ -53,12 +53,6 @@ class SqlUpgradeExampleExtension(test_sql_upgrade.SqlMigrateBase):
self.upgrade(1, repository=self.repo_path)
self.assertTableColumns('example', ['id', 'type', 'extra'])
- def test_downgrade(self):
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns('example', ['id', 'type', 'extra'])
- self.downgrade(0, repository=self.repo_path)
- self.assertTableDoesNotExist('example')
-
class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
def repo_package(self):
@@ -68,10 +62,6 @@ class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
super(SqlUpgradeOAuth1Extension, self).upgrade(
version, repository=self.repo_path)
- def downgrade(self, version):
- super(SqlUpgradeOAuth1Extension, self).downgrade(
- version, repository=self.repo_path)
-
def _assert_v1_3_tables(self):
self.assertTableColumns('consumer',
['id',
@@ -136,18 +126,6 @@ class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
self.upgrade(5)
self._assert_v4_later_tables()
- def test_downgrade(self):
- self.upgrade(5)
- self._assert_v4_later_tables()
- self.downgrade(3)
- self._assert_v1_3_tables()
- self.downgrade(1)
- self._assert_v1_3_tables()
- self.downgrade(0)
- self.assertTableDoesNotExist('consumer')
- self.assertTableDoesNotExist('request_token')
- self.assertTableDoesNotExist('access_token')
-
class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
def repo_package(self):
@@ -157,10 +135,6 @@ class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
super(EndpointFilterExtension, self).upgrade(
version, repository=self.repo_path)
- def downgrade(self, version):
- super(EndpointFilterExtension, self).downgrade(
- version, repository=self.repo_path)
-
def _assert_v1_tables(self):
self.assertTableColumns('project_endpoint',
['endpoint_id', 'project_id'])
@@ -184,14 +158,6 @@ class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
self.upgrade(2)
self._assert_v2_tables()
- def test_downgrade(self):
- self.upgrade(2)
- self._assert_v2_tables()
- self.downgrade(1)
- self._assert_v1_tables()
- self.downgrade(0)
- self.assertTableDoesNotExist('project_endpoint')
-
class EndpointPolicyExtension(test_sql_upgrade.SqlMigrateBase):
def repo_package(self):
@@ -204,14 +170,6 @@ class EndpointPolicyExtension(test_sql_upgrade.SqlMigrateBase):
['id', 'policy_id', 'endpoint_id',
'service_id', 'region_id'])
- def test_downgrade(self):
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns('policy_association',
- ['id', 'policy_id', 'endpoint_id',
- 'service_id', 'region_id'])
- self.downgrade(0, repository=self.repo_path)
- self.assertTableDoesNotExist('policy_association')
-
class FederationExtension(test_sql_upgrade.SqlMigrateBase):
"""Test class for ensuring the Federation SQL."""
@@ -264,27 +222,7 @@ class FederationExtension(test_sql_upgrade.SqlMigrateBase):
'federation_protocol')
self.assertFalse(federation_protocol.c.mapping_id.nullable)
- def test_downgrade(self):
- self.upgrade(3, repository=self.repo_path)
- self.assertTableColumns(self.identity_provider,
- ['id', 'enabled', 'description'])
- self.assertTableColumns(self.federation_protocol,
- ['id', 'idp_id', 'mapping_id'])
- self.assertTableColumns(self.mapping,
- ['id', 'rules'])
-
- self.downgrade(2, repository=self.repo_path)
- federation_protocol = utils.get_table(
- self.engine,
- 'federation_protocol')
- self.assertTrue(federation_protocol.c.mapping_id.nullable)
-
- self.downgrade(0, repository=self.repo_path)
- self.assertTableDoesNotExist(self.identity_provider)
- self.assertTableDoesNotExist(self.federation_protocol)
- self.assertTableDoesNotExist(self.mapping)
-
- def test_fixup_service_provider_attributes(self):
+ def test_service_provider_attributes_cannot_be_null(self):
self.upgrade(6, repository=self.repo_path)
self.assertTableColumns(self.service_provider,
['id', 'description', 'enabled', 'auth_url',
@@ -325,12 +263,28 @@ class FederationExtension(test_sql_upgrade.SqlMigrateBase):
sp3)
session.close()
- self.downgrade(5, repository=self.repo_path)
+
+ def test_fixup_service_provider_attributes(self):
+ session = self.Session()
+ sp1 = {'id': uuid.uuid4().hex,
+ 'auth_url': None,
+ 'sp_url': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+ sp2 = {'id': uuid.uuid4().hex,
+ 'auth_url': uuid.uuid4().hex,
+ 'sp_url': None,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+ sp3 = {'id': uuid.uuid4().hex,
+ 'auth_url': None,
+ 'sp_url': None,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+ self.upgrade(5, repository=self.repo_path)
self.assertTableColumns(self.service_provider,
['id', 'description', 'enabled', 'auth_url',
'sp_url'])
- session = self.Session()
- self.metadata.clear()
# Before the migration, the table should accept null values
self.insert_dict(session, self.service_provider, sp1)
@@ -356,13 +310,20 @@ class FederationExtension(test_sql_upgrade.SqlMigrateBase):
self.assertEqual('', sp.auth_url)
self.assertEqual('', sp.sp_url)
-_REVOKE_COLUMN_NAMES = ['id', 'domain_id', 'project_id', 'user_id', 'role_id',
- 'trust_id', 'consumer_id', 'access_token_id',
- 'issued_before', 'expires_at', 'revoked_at']
+ def test_add_relay_state_column(self):
+ self.upgrade(8, repository=self.repo_path)
+ self.assertTableColumns(self.service_provider,
+ ['id', 'description', 'enabled', 'auth_url',
+ 'relay_state_prefix', 'sp_url'])
class RevokeExtension(test_sql_upgrade.SqlMigrateBase):
+ _REVOKE_COLUMN_NAMES = ['id', 'domain_id', 'project_id', 'user_id',
+ 'role_id', 'trust_id', 'consumer_id',
+ 'access_token_id', 'issued_before', 'expires_at',
+ 'revoked_at']
+
def repo_package(self):
return revoke
@@ -370,11 +331,4 @@ class RevokeExtension(test_sql_upgrade.SqlMigrateBase):
self.assertTableDoesNotExist('revocation_event')
self.upgrade(1, repository=self.repo_path)
self.assertTableColumns('revocation_event',
- _REVOKE_COLUMN_NAMES)
-
- def test_downgrade(self):
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns('revocation_event',
- _REVOKE_COLUMN_NAMES)
- self.downgrade(0, repository=self.repo_path)
- self.assertTableDoesNotExist('revocation_event')
+ self._REVOKE_COLUMN_NAMES)
diff --git a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
index e50bad56..96dfa9e8 100644
--- a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
+++ b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
@@ -38,7 +38,6 @@ from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_db.sqlalchemy import migration
from oslo_db.sqlalchemy import session as db_session
-import six
from sqlalchemy.engine import reflection
import sqlalchemy.exc
from sqlalchemy import schema
@@ -158,6 +157,7 @@ class SqlMigrateBase(tests.SQLDriverOverrides, tests.TestCase):
# create and share a single sqlalchemy engine for testing
self.engine = sql.get_engine()
self.Session = db_session.get_maker(self.engine, autocommit=False)
+ self.addCleanup(sqlalchemy.orm.session.Session.close_all)
self.initialize_sql()
self.repo_path = migration_helpers.find_migrate_repo(
@@ -169,8 +169,12 @@ class SqlMigrateBase(tests.SQLDriverOverrides, tests.TestCase):
# auto-detect the highest available schema version in the migrate_repo
self.max_version = self.schema.repository.version().version
- def tearDown(self):
- sqlalchemy.orm.session.Session.close_all()
+ self.addCleanup(sql.cleanup)
+
+ # drop tables and FKs.
+ self.addCleanup(self._cleanupDB)
+
+ def _cleanupDB(self):
meta = sqlalchemy.MetaData()
meta.bind = self.engine
meta.reflect(self.engine)
@@ -193,14 +197,12 @@ class SqlMigrateBase(tests.SQLDriverOverrides, tests.TestCase):
all_fks.extend(fks)
for fkc in all_fks:
- conn.execute(schema.DropConstraint(fkc))
+ if self.engine.name != 'sqlite':
+ conn.execute(schema.DropConstraint(fkc))
for table in tbs:
conn.execute(schema.DropTable(table))
- sql.cleanup()
- super(SqlMigrateBase, self).tearDown()
-
def select_table(self, name):
table = sqlalchemy.Table(name,
self.metadata,
@@ -230,9 +232,6 @@ class SqlMigrateBase(tests.SQLDriverOverrides, tests.TestCase):
def upgrade(self, *args, **kwargs):
self._migrate(*args, **kwargs)
- def downgrade(self, *args, **kwargs):
- self._migrate(*args, downgrade=True, **kwargs)
-
def _migrate(self, version, repository=None, downgrade=False,
current_schema=None):
repository = repository or self.repo_path
@@ -278,42 +277,6 @@ class SqlUpgradeTests(SqlMigrateBase):
version,
'DB is not at version %s' % migrate_repo.DB_INIT_VERSION)
- def test_two_steps_forward_one_step_back(self):
- """You should be able to cleanly undo and re-apply all upgrades.
-
- Upgrades are run in the following order::
-
- Starting with the initial version defined at
- keystone.common.migrate_repo.DB_INIT_VERSION
-
- INIT +1 -> INIT +2 -> INIT +1 -> INIT +2 -> INIT +3 -> INIT +2 ...
- ^---------------------^ ^---------------------^
-
- Downgrade to the DB_INIT_VERSION does not occur based on the
- requirement that the base version be DB_INIT_VERSION + 1 before
- migration can occur. Downgrade below DB_INIT_VERSION + 1 is no longer
- supported.
-
- DB_INIT_VERSION is the number preceding the release schema version from
- two releases prior. Example, Juno releases with the DB_INIT_VERSION
- being 35 where Havana (Havana was two releases before Juno) release
- schema version is 36.
-
- The migrate utility requires the db must be initialized under version
- control with the revision directly before the first version to be
- applied.
-
- """
- for x in range(migrate_repo.DB_INIT_VERSION + 1,
- self.max_version + 1):
- self.upgrade(x)
- downgrade_ver = x - 1
- # Don't actually downgrade to the init version. This will raise
- # a not-implemented error.
- if downgrade_ver != migrate_repo.DB_INIT_VERSION:
- self.downgrade(x - 1)
- self.upgrade(x)
-
def test_upgrade_add_initial_tables(self):
self.upgrade(migrate_repo.DB_INIT_VERSION + 1)
self.check_initial_table_structure()
@@ -338,32 +301,6 @@ class SqlUpgradeTests(SqlMigrateBase):
for k in default_domain.keys():
self.assertEqual(default_domain[k], getattr(refs[0], k))
- def test_downgrade_to_db_init_version(self):
- self.upgrade(self.max_version)
-
- if self.engine.name == 'mysql':
- self._mysql_check_all_tables_innodb()
-
- self.downgrade(migrate_repo.DB_INIT_VERSION + 1)
- self.check_initial_table_structure()
-
- meta = sqlalchemy.MetaData()
- meta.bind = self.engine
- meta.reflect(self.engine)
-
- initial_table_set = set(INITIAL_TABLE_STRUCTURE.keys())
- table_set = set(meta.tables.keys())
- # explicitly remove the migrate_version table, this is not controlled
- # by the migration scripts and should be exempt from this check.
- table_set.remove('migrate_version')
-
- self.assertSetEqual(initial_table_set, table_set)
- # Downgrade to before Icehouse's release schema version (044) is not
- # supported. A NotImplementedError should be raised when attempting to
- # downgrade.
- self.assertRaises(NotImplementedError, self.downgrade,
- migrate_repo.DB_INIT_VERSION)
-
def insert_dict(self, session, table_name, d, table=None):
"""Naively inserts key-value pairs into a table, given a dictionary."""
if table is None:
@@ -380,8 +317,6 @@ class SqlUpgradeTests(SqlMigrateBase):
self.assertTableDoesNotExist('id_mapping')
self.upgrade(51)
self.assertTableExists('id_mapping')
- self.downgrade(50)
- self.assertTableDoesNotExist('id_mapping')
def test_region_url_upgrade(self):
self.upgrade(52)
@@ -389,42 +324,6 @@ class SqlUpgradeTests(SqlMigrateBase):
['id', 'description', 'parent_region_id',
'extra', 'url'])
- def test_region_url_downgrade(self):
- self.upgrade(52)
- self.downgrade(51)
- self.assertTableColumns('region',
- ['id', 'description', 'parent_region_id',
- 'extra'])
-
- def test_region_url_cleanup(self):
- # make sure that the url field is dropped in the downgrade
- self.upgrade(52)
- session = self.Session()
- beta = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'parent_region_id': uuid.uuid4().hex,
- 'url': uuid.uuid4().hex
- }
- acme = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'parent_region_id': uuid.uuid4().hex,
- 'url': None
- }
- self.insert_dict(session, 'region', beta)
- self.insert_dict(session, 'region', acme)
- region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(2, session.query(region_table).count())
- session.close()
- self.downgrade(51)
- session = self.Session()
- self.metadata.clear()
- region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(2, session.query(region_table).count())
- region = session.query(region_table)[0]
- self.assertRaises(AttributeError, getattr, region, 'url')
-
def test_endpoint_region_upgrade_columns(self):
self.upgrade(53)
self.assertTableColumns('endpoint',
@@ -439,21 +338,6 @@ class SqlUpgradeTests(SqlMigrateBase):
autoload=True)
self.assertEqual(255, endpoint_table.c.region_id.type.length)
- def test_endpoint_region_downgrade_columns(self):
- self.upgrade(53)
- self.downgrade(52)
- self.assertTableColumns('endpoint',
- ['id', 'legacy_endpoint_id', 'interface',
- 'service_id', 'url', 'extra', 'enabled',
- 'region'])
- region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(64, region_table.c.id.type.length)
- self.assertEqual(64, region_table.c.parent_region_id.type.length)
- endpoint_table = sqlalchemy.Table('endpoint',
- self.metadata,
- autoload=True)
- self.assertEqual(255, endpoint_table.c.region.type.length)
-
def test_endpoint_region_migration(self):
self.upgrade(52)
session = self.Session()
@@ -519,106 +403,29 @@ class SqlUpgradeTests(SqlMigrateBase):
self.assertEqual(1, session.query(endpoint_table).
filter_by(region_id=_small_region_name).count())
- # downgrade to 52
- session.close()
- self.downgrade(52)
- session = self.Session()
- self.metadata.clear()
-
- region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(1, session.query(region_table).count())
- self.assertEqual(1, session.query(region_table).
- filter_by(id=_small_region_name).count())
-
- endpoint_table = sqlalchemy.Table('endpoint',
- self.metadata,
- autoload=True)
- self.assertEqual(5, session.query(endpoint_table).count())
- self.assertEqual(2, session.query(endpoint_table).
- filter_by(region=_long_region_name).count())
- self.assertEqual(1, session.query(endpoint_table).
- filter_by(region=_clashing_region_name).count())
- self.assertEqual(1, session.query(endpoint_table).
- filter_by(region=_small_region_name).count())
-
def test_add_actor_id_index(self):
self.upgrade(53)
self.upgrade(54)
table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
- index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+ index_data = [(idx.name, list(idx.columns.keys()))
+ for idx in table.indexes]
self.assertIn(('ix_actor_id', ['actor_id']), index_data)
def test_token_user_id_and_trust_id_index_upgrade(self):
self.upgrade(54)
self.upgrade(55)
table = sqlalchemy.Table('token', self.metadata, autoload=True)
- index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+ index_data = [(idx.name, list(idx.columns.keys()))
+ for idx in table.indexes]
self.assertIn(('ix_token_user_id', ['user_id']), index_data)
self.assertIn(('ix_token_trust_id', ['trust_id']), index_data)
- def test_token_user_id_and_trust_id_index_downgrade(self):
- self.upgrade(55)
- self.downgrade(54)
- table = sqlalchemy.Table('token', self.metadata, autoload=True)
- index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
- self.assertNotIn(('ix_token_user_id', ['user_id']), index_data)
- self.assertNotIn(('ix_token_trust_id', ['trust_id']), index_data)
-
- def test_remove_actor_id_index(self):
- self.upgrade(54)
- self.downgrade(53)
- table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
- index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
- self.assertNotIn(('ix_actor_id', ['actor_id']), index_data)
-
def test_project_parent_id_upgrade(self):
self.upgrade(61)
self.assertTableColumns('project',
['id', 'name', 'extra', 'description',
'enabled', 'domain_id', 'parent_id'])
- def test_project_parent_id_downgrade(self):
- self.upgrade(61)
- self.downgrade(60)
- self.assertTableColumns('project',
- ['id', 'name', 'extra', 'description',
- 'enabled', 'domain_id'])
-
- def test_project_parent_id_cleanup(self):
- # make sure that the parent_id field is dropped in the downgrade
- self.upgrade(61)
- session = self.Session()
- domain = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'enabled': True}
- acme = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'domain_id': domain['id'],
- 'name': uuid.uuid4().hex,
- 'parent_id': None
- }
- beta = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'domain_id': domain['id'],
- 'name': uuid.uuid4().hex,
- 'parent_id': acme['id']
- }
- self.insert_dict(session, 'domain', domain)
- self.insert_dict(session, 'project', acme)
- self.insert_dict(session, 'project', beta)
- proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
- self.assertEqual(2, session.query(proj_table).count())
- session.close()
- self.downgrade(60)
- session = self.Session()
- self.metadata.clear()
- proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
- self.assertEqual(2, session.query(proj_table).count())
- project = session.query(proj_table)[0]
- self.assertRaises(AttributeError, getattr, project, 'parent_id')
-
def test_drop_assignment_role_fk(self):
self.upgrade(61)
self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
@@ -626,8 +433,80 @@ class SqlUpgradeTests(SqlMigrateBase):
if self.engine.name != 'sqlite':
# sqlite does not support FK deletions (or enforcement)
self.assertFalse(self.does_fk_exist('assignment', 'role_id'))
- self.downgrade(61)
- self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
+
+ def test_insert_assignment_inherited_pk(self):
+ ASSIGNMENT_TABLE_NAME = 'assignment'
+ INHERITED_COLUMN_NAME = 'inherited'
+ ROLE_TABLE_NAME = 'role'
+
+ self.upgrade(72)
+
+ # Check that the 'inherited' column is not part of the PK
+ self.assertFalse(self.does_pk_exist(ASSIGNMENT_TABLE_NAME,
+ INHERITED_COLUMN_NAME))
+
+ session = self.Session()
+
+ role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.insert_dict(session, ROLE_TABLE_NAME, role)
+
+ # Create both inherited and noninherited role assignments
+ inherited = {'type': 'UserProject',
+ 'actor_id': uuid.uuid4().hex,
+ 'target_id': uuid.uuid4().hex,
+ 'role_id': role['id'],
+ 'inherited': True}
+
+ noninherited = inherited.copy()
+ noninherited['inherited'] = False
+
+ # Create another inherited role assignment as a spoiler
+ spoiler = inherited.copy()
+ spoiler['actor_id'] = uuid.uuid4().hex
+
+ self.insert_dict(session, ASSIGNMENT_TABLE_NAME, inherited)
+ self.insert_dict(session, ASSIGNMENT_TABLE_NAME, spoiler)
+
+ # Since 'inherited' is not part of the PK, we can't insert noninherited
+ self.assertRaises(db_exception.DBDuplicateEntry,
+ self.insert_dict,
+ session,
+ ASSIGNMENT_TABLE_NAME,
+ noninherited)
+
+ session.close()
+
+ self.upgrade(73)
+
+ session = self.Session()
+ self.metadata.clear()
+
+ # Check that the 'inherited' column is now part of the PK
+ self.assertTrue(self.does_pk_exist(ASSIGNMENT_TABLE_NAME,
+ INHERITED_COLUMN_NAME))
+
+ # The noninherited role assignment can now be inserted
+ self.insert_dict(session, ASSIGNMENT_TABLE_NAME, noninherited)
+
+ assignment_table = sqlalchemy.Table(ASSIGNMENT_TABLE_NAME,
+ self.metadata,
+ autoload=True)
+
+ assignments = session.query(assignment_table).all()
+ for assignment in (inherited, spoiler, noninherited):
+ self.assertIn((assignment['type'], assignment['actor_id'],
+ assignment['target_id'], assignment['role_id'],
+ assignment['inherited']),
+ assignments)
+
+ def does_pk_exist(self, table, pk_column):
+ """Checks whether a column is primary key on a table."""
+
+ inspector = reflection.Inspector.from_engine(self.engine)
+ pk_columns = inspector.get_pk_constraint(table)['constrained_columns']
+
+ return pk_column in pk_columns
def does_fk_exist(self, table, fk_column):
inspector = reflection.Inspector.from_engine(self.engine)
@@ -642,14 +521,7 @@ class SqlUpgradeTests(SqlMigrateBase):
['id', 'description', 'parent_region_id',
'extra'])
- def test_drop_region_url_downgrade(self):
- self.upgrade(63)
- self.downgrade(62)
- self.assertTableColumns('region',
- ['id', 'description', 'parent_region_id',
- 'extra', 'url'])
-
- def test_drop_domain_fk(self):
+ def test_domain_fk(self):
self.upgrade(63)
self.assertTrue(self.does_fk_exist('group', 'domain_id'))
self.assertTrue(self.does_fk_exist('user', 'domain_id'))
@@ -658,9 +530,6 @@ class SqlUpgradeTests(SqlMigrateBase):
# sqlite does not support FK deletions (or enforcement)
self.assertFalse(self.does_fk_exist('group', 'domain_id'))
self.assertFalse(self.does_fk_exist('user', 'domain_id'))
- self.downgrade(63)
- self.assertTrue(self.does_fk_exist('group', 'domain_id'))
- self.assertTrue(self.does_fk_exist('user', 'domain_id'))
def test_add_domain_config(self):
whitelisted_table = 'whitelisted_config'
@@ -673,9 +542,6 @@ class SqlUpgradeTests(SqlMigrateBase):
['domain_id', 'group', 'option', 'value'])
self.assertTableColumns(sensitive_table,
['domain_id', 'group', 'option', 'value'])
- self.downgrade(64)
- self.assertTableDoesNotExist(whitelisted_table)
- self.assertTableDoesNotExist(sensitive_table)
def test_fixup_service_name_value_upgrade(self):
"""Update service name data from `extra` to empty string."""
@@ -724,6 +590,10 @@ class SqlUpgradeTests(SqlMigrateBase):
random_attr_name_empty, random_attr_name_none_str),
]
+ # NOTE(viktors): Add a service with empty extra field
+ self.insert_dict(session, 'service',
+ {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex})
+
session.close()
self.upgrade(66)
session = self.Session()
@@ -744,6 +614,28 @@ class SqlUpgradeTests(SqlMigrateBase):
extra = fetch_service_extra(service_id)
self.assertDictEqual(exp_extra, extra, msg)
+ def _does_index_exist(self, table_name, index_name):
+ meta = sqlalchemy.MetaData(bind=self.engine)
+ table = sqlalchemy.Table('assignment', meta, autoload=True)
+ return index_name in [idx.name for idx in table.indexes]
+
+ def test_drop_assignment_role_id_index_mysql(self):
+ self.upgrade(66)
+ if self.engine.name == "mysql":
+ self.assertTrue(self._does_index_exist('assignment',
+ 'assignment_role_id_fkey'))
+ self.upgrade(67)
+ if self.engine.name == "mysql":
+ self.assertFalse(self._does_index_exist('assignment',
+ 'assignment_role_id_fkey'))
+
+ def test_project_is_domain_upgrade(self):
+ self.upgrade(74)
+ self.assertTableColumns('project',
+ ['id', 'name', 'extra', 'description',
+ 'enabled', 'domain_id', 'parent_id',
+ 'is_domain'])
+
def populate_user_table(self, with_pass_enab=False,
with_pass_enab_domain=False):
# Populate the appropriate fields in the user
@@ -881,6 +773,13 @@ class VersionTests(SqlMigrateBase):
version = migration_helpers.get_db_version()
self.assertEqual(self.max_version, version)
+ def test_assert_not_schema_downgrade(self):
+ self.upgrade(self.max_version)
+ self.assertRaises(
+ db_exception.DbMigrationError,
+ migration_helpers._sync_common_repo,
+ self.max_version - 1)
+
def test_extension_not_controlled(self):
"""When get the version before controlling, raises DbMigrationError."""
self.assertRaises(db_exception.DbMigrationError,
@@ -889,7 +788,7 @@ class VersionTests(SqlMigrateBase):
def test_extension_initial(self):
"""When get the initial version of an extension, it's 0."""
- for name, extension in six.iteritems(EXTENSIONS):
+ for name, extension in EXTENSIONS.items():
abs_path = migration_helpers.find_migrate_repo(extension)
migration.db_version_control(sql.get_engine(), abs_path)
version = migration_helpers.get_db_version(extension=name)
@@ -898,18 +797,7 @@ class VersionTests(SqlMigrateBase):
def test_extension_migrated(self):
"""When get the version after migrating an extension, it's not 0."""
- for name, extension in six.iteritems(EXTENSIONS):
- abs_path = migration_helpers.find_migrate_repo(extension)
- migration.db_version_control(sql.get_engine(), abs_path)
- migration.db_sync(sql.get_engine(), abs_path)
- version = migration_helpers.get_db_version(extension=name)
- self.assertTrue(
- version > 0,
- "Version for %s didn't change after migrated?" % name)
-
- def test_extension_downgraded(self):
- """When get the version after downgrading an extension, it is 0."""
- for name, extension in six.iteritems(EXTENSIONS):
+ for name, extension in EXTENSIONS.items():
abs_path = migration_helpers.find_migrate_repo(extension)
migration.db_version_control(sql.get_engine(), abs_path)
migration.db_sync(sql.get_engine(), abs_path)
@@ -917,10 +805,47 @@ class VersionTests(SqlMigrateBase):
self.assertTrue(
version > 0,
"Version for %s didn't change after migrated?" % name)
- migration.db_sync(sql.get_engine(), abs_path, version=0)
- version = migration_helpers.get_db_version(extension=name)
- self.assertEqual(0, version,
- 'Migrate version for %s is not 0' % name)
+ # Verify downgrades cannot occur
+ self.assertRaises(
+ db_exception.DbMigrationError,
+ migration_helpers._sync_extension_repo,
+ extension=name,
+ version=0)
+
+ def test_extension_federation_upgraded_values(self):
+ abs_path = migration_helpers.find_migrate_repo(federation)
+ migration.db_version_control(sql.get_engine(), abs_path)
+ migration.db_sync(sql.get_engine(), abs_path, version=6)
+ idp_table = sqlalchemy.Table("identity_provider",
+ self.metadata,
+ autoload=True)
+ idps = [{'id': uuid.uuid4().hex,
+ 'enabled': True,
+ 'description': uuid.uuid4().hex,
+ 'remote_id': uuid.uuid4().hex},
+ {'id': uuid.uuid4().hex,
+ 'enabled': True,
+ 'description': uuid.uuid4().hex,
+ 'remote_id': uuid.uuid4().hex}]
+ for idp in idps:
+ ins = idp_table.insert().values({'id': idp['id'],
+ 'enabled': idp['enabled'],
+ 'description': idp['description'],
+ 'remote_id': idp['remote_id']})
+ self.engine.execute(ins)
+ migration.db_sync(sql.get_engine(), abs_path)
+ idp_remote_ids_table = sqlalchemy.Table("idp_remote_ids",
+ self.metadata,
+ autoload=True)
+ for idp in idps:
+ s = idp_remote_ids_table.select().where(
+ idp_remote_ids_table.c.idp_id == idp['id'])
+ remote = self.engine.execute(s).fetchone()
+ self.assertEqual(idp['remote_id'],
+ remote['remote_id'],
+ 'remote_ids must be preserved during the '
+ 'migration from identity_provider table to '
+ 'idp_remote_ids table')
def test_unexpected_extension(self):
"""The version for an extension that doesn't exist raises ImportError.
diff --git a/keystone-moon/keystone/tests/unit/test_ssl.py b/keystone-moon/keystone/tests/unit/test_ssl.py
index c5f443b0..3b86bb2d 100644
--- a/keystone-moon/keystone/tests/unit/test_ssl.py
+++ b/keystone-moon/keystone/tests/unit/test_ssl.py
@@ -36,6 +36,16 @@ CLIENT = os.path.join(CERTDIR, 'middleware.pem')
class SSLTestCase(tests.TestCase):
def setUp(self):
super(SSLTestCase, self).setUp()
+ raise self.skipTest('SSL Version and Ciphers cannot be configured '
+ 'with eventlet, some platforms have disabled '
+ 'SSLv3. See bug 1381365.')
+ # NOTE(morganfainberg): It has been determined that this
+ # will not be fixed. These tests should be re-enabled for the full
+ # functional test suite when run against an SSL terminated
+ # endpoint. Some distributions/environments have patched OpenSSL to
+ # not have SSLv3 at all due to POODLE and this causes differing
+ # behavior depending on platform. See bug 1381365 for more information.
+
# NOTE(jamespage):
# Deal with more secure certificate chain verification
# introduced in python 2.7.9 under PEP-0476
diff --git a/keystone-moon/keystone/tests/unit/test_token_provider.py b/keystone-moon/keystone/tests/unit/test_token_provider.py
index dc08664f..3ebb0187 100644
--- a/keystone-moon/keystone/tests/unit/test_token_provider.py
+++ b/keystone-moon/keystone/tests/unit/test_token_provider.py
@@ -18,11 +18,14 @@ from oslo_config import cfg
from oslo_utils import timeutils
from keystone.common import dependency
+from keystone.common import utils
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit.ksfixtures import database
from keystone import token
+from keystone.token.providers import fernet
from keystone.token.providers import pki
+from keystone.token.providers import pkiz
from keystone.token.providers import uuid
@@ -655,8 +658,8 @@ def create_v2_token():
return {
"access": {
"token": {
- "expires": timeutils.isotime(timeutils.utcnow() +
- FUTURE_DELTA),
+ "expires": utils.isotime(timeutils.utcnow() +
+ FUTURE_DELTA),
"issued_at": "2013-05-21T00:02:43.941473Z",
"tenant": {
"enabled": True,
@@ -671,7 +674,7 @@ def create_v2_token():
SAMPLE_V2_TOKEN_EXPIRED = {
"access": {
"token": {
- "expires": timeutils.isotime(CURRENT_DATE),
+ "expires": utils.isotime(CURRENT_DATE),
"issued_at": "2013-05-21T00:02:43.941473Z",
"tenant": {
"enabled": True,
@@ -687,7 +690,7 @@ def create_v3_token():
return {
"token": {
'methods': [],
- "expires_at": timeutils.isotime(timeutils.utcnow() + FUTURE_DELTA),
+ "expires_at": utils.isotime(timeutils.utcnow() + FUTURE_DELTA),
"issued_at": "2013-05-21T00:02:43.941473Z",
}
}
@@ -695,7 +698,7 @@ def create_v3_token():
SAMPLE_V3_TOKEN_EXPIRED = {
"token": {
- "expires_at": timeutils.isotime(CURRENT_DATE),
+ "expires_at": utils.isotime(CURRENT_DATE),
"issued_at": "2013-05-21T00:02:43.941473Z",
}
}
@@ -742,22 +745,20 @@ class TestTokenProvider(tests.TestCase):
uuid.Provider)
dependency.reset()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.uuid.Provider')
- token.provider.Manager()
+ self.config_fixture.config(group='token', provider='uuid')
+ self.assertIsInstance(token.provider.Manager().driver, uuid.Provider)
dependency.reset()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.pki.Provider')
- token.provider.Manager()
+ self.config_fixture.config(group='token', provider='pki')
+ self.assertIsInstance(token.provider.Manager().driver, pki.Provider)
dependency.reset()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.pkiz.Provider')
- token.provider.Manager()
+ self.config_fixture.config(group='token', provider='pkiz')
+ self.assertIsInstance(token.provider.Manager().driver, pkiz.Provider)
+
+ dependency.reset()
+ self.config_fixture.config(group='token', provider='fernet')
+ self.assertIsInstance(token.provider.Manager().driver, fernet.Provider)
def test_unsupported_token_provider(self):
self.config_fixture.config(group='token',
diff --git a/keystone-moon/keystone/tests/unit/test_v2.py b/keystone-moon/keystone/tests/unit/test_v2.py
index 8c7c3792..415150cf 100644
--- a/keystone-moon/keystone/tests/unit/test_v2.py
+++ b/keystone-moon/keystone/tests/unit/test_v2.py
@@ -56,6 +56,8 @@ class CoreApiTests(object):
def assertValidTenant(self, tenant):
self.assertIsNotNone(tenant.get('id'))
self.assertIsNotNone(tenant.get('name'))
+ self.assertNotIn('domain_id', tenant)
+ self.assertNotIn('parent_id', tenant)
def assertValidUser(self, user):
self.assertIsNotNone(user.get('id'))
@@ -1373,12 +1375,10 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
class RevokeApiTestCase(V2TestCase):
def config_overrides(self):
super(RevokeApiTestCase, self).config_overrides()
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
- provider='keystone.token.providers.pki.Provider',
+ provider='pki',
revoke_by_id=False)
def test_fetch_revocation_list_admin_200(self):
@@ -1410,9 +1410,7 @@ class TestFernetTokenProviderV2(RestfulTestCase):
def config_overrides(self):
super(TestFernetTokenProviderV2, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.fernet.Provider')
+ self.config_fixture.config(group='token', provider='fernet')
def test_authenticate_unscoped_token(self):
unscoped_token = self.get_unscoped_token()
@@ -1498,3 +1496,44 @@ class TestFernetTokenProviderV2(RestfulTestCase):
path=path,
token=CONF.admin_token,
expected_status=200)
+
+ def test_rescoped_tokens_maintain_original_expiration(self):
+ project_ref = self.new_project_ref()
+ self.resource_api.create_project(project_ref['id'], project_ref)
+ self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
+ project_ref['id'],
+ self.role_admin['id'])
+ resp = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'tenantName': project_ref['name'],
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password']
+ }
+ }
+ },
+ # NOTE(lbragstad): This test may need to be refactored if Keystone
+ # decides to disallow rescoping using a scoped token.
+ expected_status=200)
+ original_token = resp.result['access']['token']['id']
+ original_expiration = resp.result['access']['token']['expires']
+
+ resp = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'tenantName': project_ref['name'],
+ 'token': {
+ 'id': original_token,
+ }
+ }
+ },
+ expected_status=200)
+ rescoped_token = resp.result['access']['token']['id']
+ rescoped_expiration = resp.result['access']['token']['expires']
+ self.assertNotEqual(original_token, rescoped_token)
+ self.assertEqual(original_expiration, rescoped_expiration)
diff --git a/keystone-moon/keystone/tests/unit/test_v2_controller.py b/keystone-moon/keystone/tests/unit/test_v2_controller.py
index 6c1edd0a..0d4b3cdc 100644
--- a/keystone-moon/keystone/tests/unit/test_v2_controller.py
+++ b/keystone-moon/keystone/tests/unit/test_v2_controller.py
@@ -16,6 +16,7 @@
import uuid
from keystone.assignment import controllers as assignment_controllers
+from keystone import exception
from keystone.resource import controllers as resource_controllers
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
@@ -92,4 +93,51 @@ class TenantTestCase(tests.TestCase):
for tenant in default_fixtures.TENANTS:
tenant_copy = tenant.copy()
tenant_copy.pop('domain_id')
+ tenant_copy.pop('parent_id')
+ tenant_copy.pop('is_domain')
self.assertIn(tenant_copy, refs['tenants'])
+
+ def _create_is_domain_project(self):
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': 'default', 'is_domain': True}
+ project_ref = self.resource_api.create_project(project['id'], project)
+ return self.tenant_controller.v3_to_v2_project(project_ref)
+
+ def test_update_is_domain_project_not_found(self):
+ """Test that update is_domain project is not allowed in v2."""
+ project = self._create_is_domain_project()
+
+ project['name'] = uuid.uuid4().hex
+ self.assertRaises(
+ exception.ProjectNotFound,
+ self.tenant_controller.update_project,
+ _ADMIN_CONTEXT,
+ project['id'],
+ project
+ )
+
+ def test_delete_is_domain_project_not_found(self):
+ """Test that delete is_domain project is not allowed in v2."""
+ project = self._create_is_domain_project()
+
+ self.assertRaises(
+ exception.ProjectNotFound,
+ self.tenant_controller.delete_project,
+ _ADMIN_CONTEXT,
+ project['id']
+ )
+
+ def test_list_is_domain_project_not_found(self):
+ """Test v2 get_all_projects having projects that act as a domain.
+
+ In v2 no project with the is_domain flag enabled should be
+ returned.
+ """
+ project1 = self._create_is_domain_project()
+ project2 = self._create_is_domain_project()
+
+ refs = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT)
+ projects = refs.get('tenants')
+
+ self.assertNotIn(project1, projects)
+ self.assertNotIn(project2, projects)
diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
index 7abc5bc4..e0843605 100644
--- a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
+++ b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
@@ -15,12 +15,14 @@
import datetime
import uuid
+from keystoneclient.contrib.ec2 import utils as ec2_utils
from keystoneclient import exceptions as client_exceptions
from keystoneclient.v2_0 import client as ks_client
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import timeutils
+from six.moves import range
import webob
from keystone.tests import unit as tests
@@ -35,6 +37,11 @@ DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class ClientDrivenTestCase(tests.TestCase):
+ def config_files(self):
+ config_files = super(ClientDrivenTestCase, self).config_files()
+ config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
def setUp(self):
super(ClientDrivenTestCase, self).setUp()
@@ -61,7 +68,10 @@ class ClientDrivenTestCase(tests.TestCase):
fixture = self.useFixture(appserver.AppServer(conf, appserver.ADMIN))
self.admin_server = fixture.server
- self.addCleanup(self.cleanup_instance('public_server', 'admin_server'))
+ self.default_client = self.get_client()
+
+ self.addCleanup(self.cleanup_instance('public_server', 'admin_server',
+ 'default_client'))
def _public_url(self):
public_port = self.public_server.socket_info['socket'][1]
@@ -707,6 +717,20 @@ class ClientDrivenTestCase(tests.TestCase):
client.roles.create,
name="")
+ def test_role_create_member_role(self):
+ # delete the member role so that we can recreate it
+ client = self.get_client(admin=True)
+ client.roles.delete(role=CONF.member_role_id)
+
+ # deleting the member role revokes our token, so re-authenticate
+ client = self.get_client(admin=True)
+
+ # specify only the role name on creation
+ role = client.roles.create(name=CONF.member_role_name)
+
+ # the ID should be set as defined in CONF
+ self.assertEqual(CONF.member_role_id, role.id)
+
def test_role_get_404(self):
client = self.get_client(admin=True)
self.assertRaises(client_exceptions.NotFound,
@@ -1043,3 +1067,308 @@ class ClientDrivenTestCase(tests.TestCase):
self.assertRaises(client_exceptions.Unauthorized, client.tenants.list)
client.auth_token = new_token_id
client.tenants.list()
+
+ def test_endpoint_crud(self):
+ client = self.get_client(admin=True)
+
+ service = client.services.create(name=uuid.uuid4().hex,
+ service_type=uuid.uuid4().hex,
+ description=uuid.uuid4().hex)
+
+ endpoint_region = uuid.uuid4().hex
+ invalid_service_id = uuid.uuid4().hex
+ endpoint_publicurl = uuid.uuid4().hex
+ endpoint_internalurl = uuid.uuid4().hex
+ endpoint_adminurl = uuid.uuid4().hex
+
+ # a non-existent service ID should trigger a 400
+ self.assertRaises(client_exceptions.BadRequest,
+ client.endpoints.create,
+ region=endpoint_region,
+ service_id=invalid_service_id,
+ publicurl=endpoint_publicurl,
+ adminurl=endpoint_adminurl,
+ internalurl=endpoint_internalurl)
+
+ endpoint = client.endpoints.create(region=endpoint_region,
+ service_id=service.id,
+ publicurl=endpoint_publicurl,
+ adminurl=endpoint_adminurl,
+ internalurl=endpoint_internalurl)
+
+ self.assertEqual(endpoint_region, endpoint.region)
+ self.assertEqual(service.id, endpoint.service_id)
+ self.assertEqual(endpoint_publicurl, endpoint.publicurl)
+ self.assertEqual(endpoint_internalurl, endpoint.internalurl)
+ self.assertEqual(endpoint_adminurl, endpoint.adminurl)
+
+ client.endpoints.delete(id=endpoint.id)
+ self.assertRaises(client_exceptions.NotFound, client.endpoints.delete,
+ id=endpoint.id)
+
+ def _send_ec2_auth_request(self, credentials, client=None):
+ if not client:
+ client = self.default_client
+ url = '%s/ec2tokens' % self.default_client.auth_url
+ (resp, token) = client.request(
+ url=url, method='POST',
+ body={'credentials': credentials})
+ return resp, token
+
+ def _generate_default_user_ec2_credentials(self):
+ cred = self. default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ return self._generate_user_ec2_credentials(cred.access, cred.secret)
+
+ def _generate_user_ec2_credentials(self, access, secret):
+ signer = ec2_utils.Ec2Signer(secret)
+ credentials = {'params': {'SignatureVersion': '2'},
+ 'access': access,
+ 'verb': 'GET',
+ 'host': 'localhost',
+ 'path': '/service/cloud'}
+ signature = signer.generate(credentials)
+ return credentials, signature
+
+ def test_ec2_auth_success(self):
+ credentials, signature = self._generate_default_user_ec2_credentials()
+ credentials['signature'] = signature
+ resp, token = self._send_ec2_auth_request(credentials)
+ self.assertEqual(200, resp.status_code)
+ self.assertIn('access', token)
+
+ def test_ec2_auth_success_trust(self):
+ # Add "other" role user_foo and create trust delegating it to user_two
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_other['id'])
+ trust_id = 'atrust123'
+ trust = {'trustor_user_id': self.user_foo['id'],
+ 'trustee_user_id': self.user_two['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'impersonation': True}
+ roles = [self.role_other]
+ self.trust_api.create_trust(trust_id, trust, roles)
+
+ # Create a client for user_two, scoped to the trust
+ client = self.get_client(self.user_two)
+ ret = client.authenticate(trust_id=trust_id,
+ tenant_id=self.tenant_bar['id'])
+ self.assertTrue(ret)
+ self.assertTrue(client.auth_ref.trust_scoped)
+ self.assertEqual(trust_id, client.auth_ref.trust_id)
+
+ # Create an ec2 keypair using the trust client impersonating user_foo
+ cred = client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ credentials, signature = self._generate_user_ec2_credentials(
+ cred.access, cred.secret)
+ credentials['signature'] = signature
+ resp, token = self._send_ec2_auth_request(credentials)
+ self.assertEqual(200, resp.status_code)
+ self.assertEqual(trust_id, token['access']['trust']['id'])
+ # TODO(shardy) we really want to check the roles and trustee
+ # but because of where the stubbing happens we don't seem to
+ # hit the necessary code in controllers.py _authenticate_token
+ # so although all is OK via a real request, it incorrect in
+ # this test..
+
+ def test_ec2_auth_failure(self):
+ credentials, signature = self._generate_default_user_ec2_credentials()
+ credentials['signature'] = uuid.uuid4().hex
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._send_ec2_auth_request,
+ credentials)
+
+ def test_ec2_credential_crud(self):
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual([], creds)
+
+ cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual(creds, [cred])
+ got = self.default_client.ec2.get(user_id=self.user_foo['id'],
+ access=cred.access)
+ self.assertEqual(cred, got)
+
+ self.default_client.ec2.delete(user_id=self.user_foo['id'],
+ access=cred.access)
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual([], creds)
+
+ def test_ec2_credential_crud_non_admin(self):
+ na_client = self.get_client(self.user_two)
+ creds = na_client.ec2.list(user_id=self.user_two['id'])
+ self.assertEqual([], creds)
+
+ cred = na_client.ec2.create(user_id=self.user_two['id'],
+ tenant_id=self.tenant_baz['id'])
+ creds = na_client.ec2.list(user_id=self.user_two['id'])
+ self.assertEqual(creds, [cred])
+ got = na_client.ec2.get(user_id=self.user_two['id'],
+ access=cred.access)
+ self.assertEqual(cred, got)
+
+ na_client.ec2.delete(user_id=self.user_two['id'],
+ access=cred.access)
+ creds = na_client.ec2.list(user_id=self.user_two['id'])
+ self.assertEqual([], creds)
+
+ def test_ec2_list_credentials(self):
+ cred_1 = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ cred_2 = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_service['id'])
+ cred_3 = self.default_client.ec2.create(
+ user_id=self.user_foo['id'],
+ tenant_id=self.tenant_mtu['id'])
+ two = self.get_client(self.user_two)
+ cred_4 = two.ec2.create(user_id=self.user_two['id'],
+ tenant_id=self.tenant_bar['id'])
+ creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEqual(3, len(creds))
+ self.assertEqual(sorted([cred_1, cred_2, cred_3],
+ key=lambda x: x.access),
+ sorted(creds, key=lambda x: x.access))
+ self.assertNotIn(cred_4, creds)
+
+ def test_ec2_credentials_create_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.create,
+ user_id=uuid.uuid4().hex,
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.create,
+ user_id=self.user_foo['id'],
+ tenant_id=uuid.uuid4().hex)
+
+ def test_ec2_credentials_delete_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.delete,
+ user_id=uuid.uuid4().hex,
+ access=uuid.uuid4().hex)
+
+ def test_ec2_credentials_get_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.get,
+ user_id=uuid.uuid4().hex,
+ access=uuid.uuid4().hex)
+
+ def test_ec2_credentials_list_404(self):
+ self.assertRaises(client_exceptions.NotFound,
+ self.default_client.ec2.list,
+ user_id=uuid.uuid4().hex)
+
+ def test_ec2_credentials_list_user_forbidden(self):
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.list,
+ user_id=self.user_foo['id'])
+
+ def test_ec2_credentials_get_user_forbidden(self):
+ cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.get,
+ user_id=self.user_foo['id'], access=cred.access)
+
+ self.default_client.ec2.delete(user_id=self.user_foo['id'],
+ access=cred.access)
+
+ def test_ec2_credentials_delete_user_forbidden(self):
+ cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.delete,
+ user_id=self.user_foo['id'], access=cred.access)
+
+ self.default_client.ec2.delete(user_id=self.user_foo['id'],
+ access=cred.access)
+
+ def test_endpoint_create_nonexistent_service(self):
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.endpoints.create,
+ region=uuid.uuid4().hex,
+ service_id=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex)
+
+ def test_policy_crud(self):
+ # FIXME(dolph): this test was written prior to the v3 implementation of
+ # the client and essentially refers to a non-existent
+ # policy manager in the v2 client. this test needs to be
+ # moved to a test suite running against the v3 api
+ self.skipTest('Written prior to v3 client; needs refactor')
+
+ client = self.get_client(admin=True)
+
+ policy_blob = uuid.uuid4().hex
+ policy_type = uuid.uuid4().hex
+ service = client.services.create(
+ name=uuid.uuid4().hex,
+ service_type=uuid.uuid4().hex,
+ description=uuid.uuid4().hex)
+ endpoint = client.endpoints.create(
+ service_id=service.id,
+ region=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex)
+
+ # create
+ policy = client.policies.create(
+ blob=policy_blob,
+ type=policy_type,
+ endpoint=endpoint.id)
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ policy = client.policies.get(policy=policy.id)
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id]
+ endpoint = endpoints[0]
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ # update
+ policy_blob = uuid.uuid4().hex
+ policy_type = uuid.uuid4().hex
+ endpoint = client.endpoints.create(
+ service_id=service.id,
+ region=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex)
+
+ policy = client.policies.update(
+ policy=policy.id,
+ blob=policy_blob,
+ type=policy_type,
+ endpoint=endpoint.id)
+
+ policy = client.policies.get(policy=policy.id)
+ self.assertEqual(policy_blob, policy.policy)
+ self.assertEqual(policy_type, policy.type)
+ self.assertEqual(endpoint.id, policy.endpoint_id)
+
+ # delete
+ client.policies.delete(policy=policy.id)
+ self.assertRaises(
+ client_exceptions.NotFound,
+ client.policies.get,
+ policy=policy.id)
+ policies = [x for x in client.policies.list() if x.id == policy.id]
+ self.assertEqual(0, len(policies))
diff --git a/keystone-moon/keystone/tests/unit/test_v3.py b/keystone-moon/keystone/tests/unit/test_v3.py
index f6d6ed93..9bbfa103 100644
--- a/keystone-moon/keystone/tests/unit/test_v3.py
+++ b/keystone-moon/keystone/tests/unit/test_v3.py
@@ -299,10 +299,11 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
ref = self.new_ref()
return ref
- def new_project_ref(self, domain_id, parent_id=None):
+ def new_project_ref(self, domain_id=None, parent_id=None, is_domain=False):
ref = self.new_ref()
ref['domain_id'] = domain_id
ref['parent_id'] = parent_id
+ ref['is_domain'] = is_domain
return ref
def new_user_ref(self, domain_id, project_id=None):
@@ -362,9 +363,9 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
if isinstance(expires, six.string_types):
ref['expires_at'] = expires
elif isinstance(expires, dict):
- ref['expires_at'] = timeutils.strtime(
- timeutils.utcnow() + datetime.timedelta(**expires),
- fmt=TIME_FORMAT)
+ ref['expires_at'] = (
+ timeutils.utcnow() + datetime.timedelta(**expires)
+ ).strftime(TIME_FORMAT)
elif expires is None:
pass
else:
@@ -396,6 +397,29 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
return project
+ def get_unscoped_token(self):
+ """Convenience method so that we can test authenticated requests."""
+ r = self.admin_request(
+ method='POST',
+ path='/v3/auth/tokens',
+ body={
+ 'auth': {
+ 'identity': {
+ 'methods': ['password'],
+ 'password': {
+ 'user': {
+ 'name': self.user['name'],
+ 'password': self.user['password'],
+ 'domain': {
+ 'id': self.user['domain_id']
+ }
+ }
+ }
+ }
+ }
+ })
+ return r.headers.get('X-Subject-Token')
+
def get_scoped_token(self):
"""Convenience method so that we can test authenticated requests."""
r = self.admin_request(
@@ -424,6 +448,34 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
})
return r.headers.get('X-Subject-Token')
+ def get_domain_scoped_token(self):
+ """Convenience method for requesting domain scoped token."""
+ r = self.admin_request(
+ method='POST',
+ path='/v3/auth/tokens',
+ body={
+ 'auth': {
+ 'identity': {
+ 'methods': ['password'],
+ 'password': {
+ 'user': {
+ 'name': self.user['name'],
+ 'password': self.user['password'],
+ 'domain': {
+ 'id': self.user['domain_id']
+ }
+ }
+ }
+ },
+ 'scope': {
+ 'domain': {
+ 'id': self.domain['id'],
+ }
+ }
+ }
+ })
+ return r.headers.get('X-Subject-Token')
+
def get_requested_token(self, auth):
"""Request the specific token we want."""
@@ -593,20 +645,6 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
return entity
- def assertDictContainsSubset(self, expected, actual):
- """"Asserts if dictionary actual is a superset of expected.
-
- Tests whether the key/value pairs in dictionary actual are a superset
- of those in expected.
-
- """
- for k, v in expected.iteritems():
- self.assertIn(k, actual)
- if isinstance(v, dict):
- self.assertDictContainsSubset(v, actual[k])
- else:
- self.assertEqual(v, actual[k])
-
# auth validation
def assertValidISO8601ExtendedFormatDatetime(self, dt):
@@ -752,7 +790,7 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
self.assertValidCatalog(resp.json['catalog'])
self.assertIn('links', resp.json)
self.assertIsInstance(resp.json['links'], dict)
- self.assertEqual(['self'], resp.json['links'].keys())
+ self.assertEqual(['self'], list(resp.json['links'].keys()))
self.assertEqual(
'http://localhost/v3/auth/catalog',
resp.json['links']['self'])
@@ -1258,6 +1296,42 @@ class AuthContextMiddlewareTestCase(RestfulTestCase):
self.assertDictEqual(req.environ.get(authorization.AUTH_CONTEXT_ENV),
{})
+ def test_unscoped_token_auth_context(self):
+ unscoped_token = self.get_unscoped_token()
+ req = self._mock_request_object(unscoped_token)
+ application = None
+ middleware.AuthContextMiddleware(application).process_request(req)
+ for key in ['project_id', 'domain_id', 'domain_name']:
+ self.assertNotIn(
+ key,
+ req.environ.get(authorization.AUTH_CONTEXT_ENV))
+
+ def test_project_scoped_token_auth_context(self):
+ project_scoped_token = self.get_scoped_token()
+ req = self._mock_request_object(project_scoped_token)
+ application = None
+ middleware.AuthContextMiddleware(application).process_request(req)
+ self.assertEqual(
+ self.project['id'],
+ req.environ.get(authorization.AUTH_CONTEXT_ENV)['project_id'])
+
+ def test_domain_scoped_token_auth_context(self):
+ # grant the domain role to user
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ domain_scoped_token = self.get_domain_scoped_token()
+ req = self._mock_request_object(domain_scoped_token)
+ application = None
+ middleware.AuthContextMiddleware(application).process_request(req)
+ self.assertEqual(
+ self.domain['id'],
+ req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_id'])
+ self.assertEqual(
+ self.domain['name'],
+ req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_name'])
+
class JsonHomeTestMixin(object):
"""JSON Home test
@@ -1281,3 +1355,88 @@ class JsonHomeTestMixin(object):
for rel in self.JSON_HOME_DATA:
self.assertThat(resp_data['resources'][rel],
matchers.Equals(self.JSON_HOME_DATA[rel]))
+
+
class AssignmentTestMixin(object):
    """To hold assignment helper functions."""

    def build_role_assignment_query_url(self, effective=False, **filters):
        """Build and return a role assignment query url with provided params.

        Available filters are: domain_id, project_id, user_id, group_id,
        role_id and inherited_to_projects.

        :param effective: whether to ask for effective assignments.
        :raises ValueError: if an unrecognized filter key is provided.
        :returns: the query url string, e.g. '/role_assignments?user.id=...'.
        """
        query_params = '?effective' if effective else ''

        for k, v in filters.items():
            # First parameter gets '?', subsequent ones get '&'.
            query_params += '?' if not query_params else '&'

            if k == 'inherited_to_projects':
                # Fixed query string; no key/value substitution needed.
                query_params += 'scope.OS-INHERIT:inherited_to=projects'
            else:
                if k in ['domain_id', 'project_id']:
                    # Scope filters are nested under 'scope.' in the API.
                    query_params += 'scope.'
                elif k not in ['user_id', 'group_id', 'role_id']:
                    raise ValueError(
                        'Invalid key \'%s\' in provided filters.' % k)

                # e.g. 'user_id' becomes 'user.id' in the query string.
                query_params += '%s=%s' % (k.replace('_', '.'), v)

        return '/role_assignments%s' % query_params

    def build_role_assignment_link(self, **attribs):
        """Build and return a role assignment link with provided attributes.

        Provided attributes are expected to contain: domain_id or project_id,
        user_id or group_id, role_id and, optionally, inherited_to_projects.
        """
        if attribs.get('domain_id'):
            link = '/domains/' + attribs['domain_id']
        else:
            link = '/projects/' + attribs['project_id']

        if attribs.get('user_id'):
            link += '/users/' + attribs['user_id']
        else:
            link += '/groups/' + attribs['group_id']

        link += '/roles/' + attribs['role_id']

        if attribs.get('inherited_to_projects'):
            return '/OS-INHERIT%s/inherited_to_projects' % link

        return link

    def build_role_assignment_entity(self, link=None, **attribs):
        """Build and return a role assignment entity with provided attributes.

        Provided attributes are expected to contain: domain_id or project_id,
        user_id or group_id, role_id and, optionally, inherited_to_projects.
        """
        entity = {'links': {'assignment': (
            link or self.build_role_assignment_link(**attribs))}}

        if attribs.get('domain_id'):
            entity['scope'] = {'domain': {'id': attribs['domain_id']}}
        else:
            entity['scope'] = {'project': {'id': attribs['project_id']}}

        if attribs.get('user_id'):
            entity['user'] = {'id': attribs['user_id']}

            # NOTE: the membership check must be nested inside the user
            # branch.  If it were a sibling if/else, a user-only assignment
            # would fall into the else and KeyError on 'group_id', and a
            # group-only assignment would KeyError on 'user_id' when
            # building the membership link.
            if attribs.get('group_id'):
                # Assignment gained via group membership: also record the
                # membership link.
                entity['links']['membership'] = ('/groups/%s/users/%s' %
                                                 (attribs['group_id'],
                                                  attribs['user_id']))
        else:
            entity['group'] = {'id': attribs['group_id']}

        entity['role'] = {'id': attribs['role_id']}

        if attribs.get('inherited_to_projects'):
            entity['scope']['OS-INHERIT:inherited_to'] = 'projects'

        return entity
diff --git a/keystone-moon/keystone/tests/unit/test_v3_assignment.py b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
index add14bfb..03e5d30b 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_assignment.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
@@ -11,107 +11,23 @@
# under the License.
import random
-import six
import uuid
from oslo_config import cfg
+from six.moves import range
from keystone.common import controller
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit import test_v3
+from keystone.tests.unit import utils
CONF = cfg.CONF
-def _build_role_assignment_query_url(effective=False, **filters):
- '''Build and return a role assignment query url with provided params.
-
- Available filters are: domain_id, project_id, user_id, group_id, role_id
- and inherited_to_projects.
-
- '''
-
- query_params = '?effective' if effective else ''
-
- for k, v in six.iteritems(filters):
- query_params += '?' if not query_params else '&'
-
- if k == 'inherited_to_projects':
- query_params += 'scope.OS-INHERIT:inherited_to=projects'
- else:
- if k in ['domain_id', 'project_id']:
- query_params += 'scope.'
- elif k not in ['user_id', 'group_id', 'role_id']:
- raise ValueError('Invalid key \'%s\' in provided filters.' % k)
-
- query_params += '%s=%s' % (k.replace('_', '.'), v)
-
- return '/role_assignments%s' % query_params
-
-
-def _build_role_assignment_link(**attribs):
- """Build and return a role assignment link with provided attributes.
-
- Provided attributes are expected to contain: domain_id or project_id,
- user_id or group_id, role_id and, optionally, inherited_to_projects.
-
- """
-
- if attribs.get('domain_id'):
- link = '/domains/' + attribs['domain_id']
- else:
- link = '/projects/' + attribs['project_id']
-
- if attribs.get('user_id'):
- link += '/users/' + attribs['user_id']
- else:
- link += '/groups/' + attribs['group_id']
-
- link += '/roles/' + attribs['role_id']
-
- if attribs.get('inherited_to_projects'):
- return '/OS-INHERIT%s/inherited_to_projects' % link
-
- return link
-
-
-def _build_role_assignment_entity(link=None, **attribs):
- """Build and return a role assignment entity with provided attributes.
-
- Provided attributes are expected to contain: domain_id or project_id,
- user_id or group_id, role_id and, optionally, inherited_to_projects.
-
- """
-
- entity = {'links': {'assignment': (
- link or _build_role_assignment_link(**attribs))}}
-
- if attribs.get('domain_id'):
- entity['scope'] = {'domain': {'id': attribs['domain_id']}}
- else:
- entity['scope'] = {'project': {'id': attribs['project_id']}}
-
- if attribs.get('user_id'):
- entity['user'] = {'id': attribs['user_id']}
-
- if attribs.get('group_id'):
- entity['links']['membership'] = ('/groups/%s/users/%s' %
- (attribs['group_id'],
- attribs['user_id']))
- else:
- entity['group'] = {'id': attribs['group_id']}
-
- entity['role'] = {'id': attribs['role_id']}
-
- if attribs.get('inherited_to_projects'):
- entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
-
- return entity
-
-
-class AssignmentTestCase(test_v3.RestfulTestCase):
+class AssignmentTestCase(test_v3.RestfulTestCase,
+ test_v3.AssignmentTestMixin):
"""Test domains, projects, roles and role assignments."""
def setUp(self):
@@ -205,8 +121,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
self.assignment_api.add_user_to_project(self.project2['id'],
self.user2['id'])
- # First check a user in that domain can authenticate, via
- # Both v2 and v3
+ # First check a user in that domain can authenticate. The v2 user
+ # cannot authenticate because they exist outside the default domain.
body = {
'auth': {
'passwordCredentials': {
@@ -216,7 +132,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
'tenantId': self.project2['id']
}
}
- self.admin_request(path='/v2.0/tokens', method='POST', body=body)
+ self.admin_request(
+ path='/v2.0/tokens', method='POST', body=body, expected_status=401)
auth_data = self.build_authentication_request(
user_id=self.user2['id'],
@@ -507,26 +424,26 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
for domain in create_domains():
self.assertRaises(
- AssertionError, self.assignment_api.create_domain,
+ AssertionError, self.resource_api.create_domain,
domain['id'], domain)
self.assertRaises(
- AssertionError, self.assignment_api.update_domain,
+ AssertionError, self.resource_api.update_domain,
domain['id'], domain)
self.assertRaises(
- exception.DomainNotFound, self.assignment_api.delete_domain,
+ exception.DomainNotFound, self.resource_api.delete_domain,
domain['id'])
# swap 'name' with 'id' and try again, expecting the request to
# gracefully fail
domain['id'], domain['name'] = domain['name'], domain['id']
self.assertRaises(
- AssertionError, self.assignment_api.create_domain,
+ AssertionError, self.resource_api.create_domain,
domain['id'], domain)
self.assertRaises(
- AssertionError, self.assignment_api.update_domain,
+ AssertionError, self.resource_api.update_domain,
domain['id'], domain)
self.assertRaises(
- exception.DomainNotFound, self.assignment_api.delete_domain,
+ exception.DomainNotFound, self.resource_api.delete_domain,
domain['id'])
def test_forbid_operations_on_defined_federated_domain(self):
@@ -542,47 +459,13 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
domain = self.new_domain_ref()
domain['name'] = non_default_name
self.assertRaises(AssertionError,
- self.assignment_api.create_domain,
+ self.resource_api.create_domain,
domain['id'], domain)
self.assertRaises(exception.DomainNotFound,
- self.assignment_api.delete_domain,
+ self.resource_api.delete_domain,
domain['id'])
self.assertRaises(AssertionError,
- self.assignment_api.update_domain,
- domain['id'], domain)
-
- def test_set_federated_domain_when_config_empty(self):
- """Make sure we are operable even if config value is not properly
- set.
-
- This includes operations like create, update, delete.
-
- """
- federated_name = 'Federated'
- self.config_fixture.config(group='federation',
- federated_domain_name='')
- domain = self.new_domain_ref()
- domain['id'] = federated_name
- self.assertRaises(AssertionError,
- self.assignment_api.create_domain,
- domain['id'], domain)
- self.assertRaises(exception.DomainNotFound,
- self.assignment_api.delete_domain,
- domain['id'])
- self.assertRaises(AssertionError,
- self.assignment_api.update_domain,
- domain['id'], domain)
-
- # swap id with name
- domain['id'], domain['name'] = domain['name'], domain['id']
- self.assertRaises(AssertionError,
- self.assignment_api.create_domain,
- domain['id'], domain)
- self.assertRaises(exception.DomainNotFound,
- self.assignment_api.delete_domain,
- domain['id'])
- self.assertRaises(AssertionError,
- self.assignment_api.update_domain,
+ self.resource_api.update_domain,
domain['id'], domain)
# Project CRUD tests
@@ -606,8 +489,71 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
"""Call ``POST /projects``."""
self.post('/projects', body={'project': {}}, expected_status=400)
+ def test_create_project_invalid_domain_id(self):
+ """Call ``POST /projects``."""
+ ref = self.new_project_ref(domain_id=uuid.uuid4().hex)
+ self.post('/projects', body={'project': ref}, expected_status=400)
+
+ def test_create_project_is_domain_not_allowed(self):
+ """Call ``POST /projects``.
+
+ Setting is_domain=True is not supported yet and should raise
+ NotImplemented.
+
+ """
+ ref = self.new_project_ref(domain_id=self.domain_id, is_domain=True)
+ self.post('/projects',
+ body={'project': ref},
+ expected_status=501)
+
+ @utils.wip('waiting for projects acting as domains implementation')
+ def test_create_project_without_parent_id_and_without_domain_id(self):
+ """Call ``POST /projects``."""
+
+ # Grant a domain role for the user
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+ self.put(member_url)
+
+ # Create an authentication request for a domain scoped token
+ auth = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain_id)
+
+ # Without domain_id and parent_id, the domain_id should be
+ # normalized to the domain on the token, when using a domain
+ # scoped token.
+ ref = self.new_project_ref()
+ r = self.post(
+ '/projects',
+ auth=auth,
+ body={'project': ref})
+ ref['domain_id'] = self.domain['id']
+ self.assertValidProjectResponse(r, ref)
+
+ @utils.wip('waiting for projects acting as domains implementation')
+ def test_create_project_with_parent_id_and_no_domain_id(self):
+ """Call ``POST /projects``."""
+ # With only the parent_id, the domain_id should be
+ # normalized to the parent's domain_id
+ ref_child = self.new_project_ref(parent_id=self.project['id'])
+
+ r = self.post(
+ '/projects',
+ body={'project': ref_child})
+ self.assertEqual(r.result['project']['domain_id'],
+ self.project['domain_id'])
+ ref_child['domain_id'] = self.domain['id']
+ self.assertValidProjectResponse(r, ref_child)
+
def _create_projects_hierarchy(self, hierarchy_size=1):
- """Creates a project hierarchy with specified size.
+ """Creates a single-branched project hierarchy with the specified size.
:param hierarchy_size: the desired hierarchy size, default is 1 -
a project with one child.
@@ -615,9 +561,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
:returns projects: a list of the projects in the created hierarchy.
"""
- resp = self.get(
- '/projects/%(project_id)s' % {
- 'project_id': self.project_id})
+ new_ref = self.new_project_ref(domain_id=self.domain_id)
+ resp = self.post('/projects', body={'project': new_ref})
projects = [resp.result]
@@ -633,6 +578,58 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
return projects
+ def test_list_projects_filtering_by_parent_id(self):
+ """Call ``GET /projects?parent_id={project_id}``."""
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ # Add another child to projects[1] - it will be projects[3]
+ new_ref = self.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[1]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+
+ projects.append(resp.result)
+
+ # Query for projects[0] immediate children - it will
+ # be only projects[1]
+ r = self.get(
+ '/projects?parent_id=%(project_id)s' % {
+ 'project_id': projects[0]['project']['id']})
+ self.assertValidProjectListResponse(r)
+
+ projects_result = r.result['projects']
+ expected_list = [projects[1]['project']]
+
+ # projects[0] has projects[1] as child
+ self.assertEqual(expected_list, projects_result)
+
+ # Query for projects[1] immediate children - it will
+ # be projects[2] and projects[3]
+ r = self.get(
+ '/projects?parent_id=%(project_id)s' % {
+ 'project_id': projects[1]['project']['id']})
+ self.assertValidProjectListResponse(r)
+
+ projects_result = r.result['projects']
+ expected_list = [projects[2]['project'], projects[3]['project']]
+
+ # projects[1] has projects[2] and projects[3] as children
+ self.assertEqual(expected_list, projects_result)
+
+ # Query for projects[2] immediate children - it will be an empty list
+ r = self.get(
+ '/projects?parent_id=%(project_id)s' % {
+ 'project_id': projects[2]['project']['id']})
+ self.assertValidProjectListResponse(r)
+
+ projects_result = r.result['projects']
+ expected_list = []
+
+ # projects[2] has no child, projects_result must be an empty list
+ self.assertEqual(expected_list, projects_result)
+
def test_create_hierarchical_project(self):
"""Call ``POST /projects``."""
self._create_projects_hierarchy()
@@ -644,6 +641,22 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
'project_id': self.project_id})
self.assertValidProjectResponse(r, self.project)
+ def test_get_project_with_parents_as_list_with_invalid_id(self):
+ """Call ``GET /projects/{project_id}?parents_as_list``."""
+ self.get('/projects/%(project_id)s?parents_as_list' % {
+ 'project_id': None}, expected_status=404)
+
+ self.get('/projects/%(project_id)s?parents_as_list' % {
+ 'project_id': uuid.uuid4().hex}, expected_status=404)
+
+ def test_get_project_with_subtree_as_list_with_invalid_id(self):
+ """Call ``GET /projects/{project_id}?subtree_as_list``."""
+ self.get('/projects/%(project_id)s?subtree_as_list' % {
+ 'project_id': None}, expected_status=404)
+
+ self.get('/projects/%(project_id)s?subtree_as_list' % {
+ 'project_id': uuid.uuid4().hex}, expected_status=404)
+
def test_get_project_with_parents_as_ids(self):
"""Call ``GET /projects/{project_id}?parents_as_ids``."""
projects = self._create_projects_hierarchy(hierarchy_size=2)
@@ -683,18 +696,66 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
# projects[0] has no parents, parents_as_ids must be None
self.assertIsNone(parents_as_ids)
- def test_get_project_with_parents_as_list(self):
- """Call ``GET /projects/{project_id}?parents_as_list``."""
- projects = self._create_projects_hierarchy(hierarchy_size=2)
+ def test_get_project_with_parents_as_list_with_full_access(self):
+ """``GET /projects/{project_id}?parents_as_list`` with full access.
- r = self.get(
- '/projects/%(project_id)s?parents_as_list' % {
- 'project_id': projects[1]['project']['id']})
+ Test plan:
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on each one of those projects;
+ - Check that calling parents_as_list on 'subproject' returns both
+ 'project' and 'parent'.
+
+ """
+
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on all the created projects
+ for proj in (parent, project, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?parents_as_list' %
+ {'project_id': subproject['project']['id']})
+ self.assertValidProjectResponse(r, subproject['project'])
+
+ # Assert only 'project' and 'parent' are in the parents list
+ self.assertIn(project, r.result['project']['parents'])
+ self.assertIn(parent, r.result['project']['parents'])
+ self.assertEqual(2, len(r.result['project']['parents']))
+
+ def test_get_project_with_parents_as_list_with_partial_access(self):
+ """``GET /projects/{project_id}?parents_as_list`` with partial access.
+
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on 'parent' and 'subproject';
+ - Check that calling parents_as_list on 'subproject' only returns
+ 'parent'.
+
+ """
+
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on parent and subproject
+ for proj in (parent, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?parents_as_list' %
+ {'project_id': subproject['project']['id']})
+ self.assertValidProjectResponse(r, subproject['project'])
+
+ # Assert only 'parent' is in the parents list
+ self.assertIn(parent, r.result['project']['parents'])
self.assertEqual(1, len(r.result['project']['parents']))
- self.assertValidProjectResponse(r, projects[1]['project'])
- self.assertIn(projects[0], r.result['project']['parents'])
- self.assertNotIn(projects[2], r.result['project']['parents'])
def test_get_project_with_parents_as_list_and_parents_as_ids(self):
"""Call ``GET /projects/{project_id}?parents_as_list&parents_as_ids``.
@@ -798,18 +859,65 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
# projects[3] has no subtree, subtree_as_ids must be None
self.assertIsNone(subtree_as_ids)
- def test_get_project_with_subtree_as_list(self):
- """Call ``GET /projects/{project_id}?subtree_as_list``."""
- projects = self._create_projects_hierarchy(hierarchy_size=2)
+ def test_get_project_with_subtree_as_list_with_full_access(self):
+ """``GET /projects/{project_id}?subtree_as_list`` with full access.
- r = self.get(
- '/projects/%(project_id)s?subtree_as_list' % {
- 'project_id': projects[1]['project']['id']})
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on each one of those projects;
+ - Check that calling subtree_as_list on 'parent' returns both 'parent'
+ and 'subproject'.
+
+ """
+
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on all the created projects
+ for proj in (parent, project, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?subtree_as_list' %
+ {'project_id': parent['project']['id']})
+ self.assertValidProjectResponse(r, parent['project'])
+
+ # Assert only 'project' and 'subproject' are in the subtree
+ self.assertIn(project, r.result['project']['subtree'])
+ self.assertIn(subproject, r.result['project']['subtree'])
+ self.assertEqual(2, len(r.result['project']['subtree']))
+
+ def test_get_project_with_subtree_as_list_with_partial_access(self):
+ """``GET /projects/{project_id}?subtree_as_list`` with partial access.
+
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on 'parent' and 'subproject';
+ - Check that calling subtree_as_list on 'parent' returns 'subproject'.
+
+ """
+
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on parent and subproject
+ for proj in (parent, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?subtree_as_list' %
+ {'project_id': parent['project']['id']})
+ self.assertValidProjectResponse(r, parent['project'])
+
+ # Assert only 'subproject' is in the subtree
+ self.assertIn(subproject, r.result['project']['subtree'])
self.assertEqual(1, len(r.result['project']['subtree']))
- self.assertValidProjectResponse(r, projects[1]['project'])
- self.assertNotIn(projects[0], r.result['project']['subtree'])
- self.assertIn(projects[2], r.result['project']['subtree'])
def test_get_project_with_subtree_as_list_and_subtree_as_ids(self):
"""Call ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids``.
@@ -859,6 +967,22 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
body={'project': leaf_project},
expected_status=403)
+ def test_update_project_is_domain_not_allowed(self):
+ """Call ``PATCH /projects/{project_id}`` with is_domain.
+
+ The is_domain flag is immutable.
+ """
+ project = self.new_project_ref(domain_id=self.domain['id'])
+ resp = self.post('/projects',
+ body={'project': project})
+ self.assertFalse(resp.result['project']['is_domain'])
+
+ project['is_domain'] = True
+ self.patch('/projects/%(project_id)s' % {
+ 'project_id': resp.result['project']['id']},
+ body={'project': project},
+ expected_status=400)
+
def test_disable_leaf_project(self):
"""Call ``PATCH /projects/{project_id}``."""
projects = self._create_projects_hierarchy()
@@ -920,10 +1044,10 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
def test_delete_not_leaf_project(self):
"""Call ``DELETE /projects/{project_id}``."""
- self._create_projects_hierarchy()
+ projects = self._create_projects_hierarchy()
self.delete(
'/projects/%(project_id)s' % {
- 'project_id': self.project_id},
+ 'project_id': projects[0]['project']['id']},
expected_status=403)
# Role CRUD tests
@@ -967,6 +1091,19 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
self.delete('/roles/%(role_id)s' % {
'role_id': self.role_id})
+ def test_create_member_role(self):
+ """Call ``POST /roles``."""
+ # specify only the name on creation
+ ref = self.new_role_ref()
+ ref['name'] = CONF.member_role_name
+ r = self.post(
+ '/roles',
+ body={'role': ref})
+ self.assertValidRoleResponse(r, ref)
+
+ # but the ID should be set as defined in CONF
+ self.assertEqual(CONF.member_role_id, r.json['role']['id'])
+
# Role Grants tests
def test_crud_user_project_role_grants(self):
@@ -1252,9 +1389,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
# Now add one of each of the four types of assignment, making sure
# that we get them all back.
- gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group_id,
- role_id=self.role_id)
+ gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
@@ -1263,9 +1400,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
- ud_entity = _build_role_assignment_entity(domain_id=self.domain_id,
- user_id=self.user1['id'],
- role_id=self.role_id)
+ ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
+ user_id=self.user1['id'],
+ role_id=self.role_id)
self.put(ud_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
@@ -1274,9 +1411,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, ud_entity)
- gp_entity = _build_role_assignment_entity(project_id=self.project_id,
- group_id=self.group_id,
- role_id=self.role_id)
+ gp_entity = self.build_role_assignment_entity(
+ project_id=self.project_id, group_id=self.group_id,
+ role_id=self.role_id)
self.put(gp_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
@@ -1285,9 +1422,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gp_entity)
- up_entity = _build_role_assignment_entity(project_id=self.project_id,
- user_id=self.user1['id'],
- role_id=self.role_id)
+ up_entity = self.build_role_assignment_entity(
+ project_id=self.project_id, user_id=self.user1['id'],
+ role_id=self.role_id)
self.put(up_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
@@ -1346,9 +1483,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
existing_assignments = len(r.result.get('role_assignments'))
- gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group_id,
- role_id=self.role_id)
+ gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
@@ -1366,11 +1503,11 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
- ud_entity = _build_role_assignment_entity(
+ ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
user_id=self.user1['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
- ud_entity = _build_role_assignment_entity(
+ ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
user_id=self.user2['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
@@ -1420,9 +1557,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
existing_assignments = len(r.result.get('role_assignments'))
- gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group_id,
- role_id=self.role_id)
+ gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
+ group_id=self.group_id,
+ role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
@@ -1516,22 +1653,22 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
# Now add one of each of the four types of assignment
- gd_entity = _build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group1['id'],
- role_id=self.role1['id'])
+ gd_entity = self.build_role_assignment_entity(
+ domain_id=self.domain_id, group_id=self.group1['id'],
+ role_id=self.role1['id'])
self.put(gd_entity['links']['assignment'])
- ud_entity = _build_role_assignment_entity(domain_id=self.domain_id,
- user_id=self.user1['id'],
- role_id=self.role2['id'])
+ ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
+ user_id=self.user1['id'],
+ role_id=self.role2['id'])
self.put(ud_entity['links']['assignment'])
- gp_entity = _build_role_assignment_entity(
+ gp_entity = self.build_role_assignment_entity(
project_id=self.project1['id'], group_id=self.group1['id'],
role_id=self.role1['id'])
self.put(gp_entity['links']['assignment'])
- up_entity = _build_role_assignment_entity(
+ up_entity = self.build_role_assignment_entity(
project_id=self.project1['id'], user_id=self.user1['id'],
role_id=self.role2['id'])
self.put(up_entity['links']['assignment'])
@@ -1607,17 +1744,17 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, ud_entity)
# ...and the two via group membership...
- gp1_link = _build_role_assignment_link(project_id=self.project1['id'],
- group_id=self.group1['id'],
- role_id=self.role1['id'])
- gd1_link = _build_role_assignment_link(domain_id=self.domain_id,
- group_id=self.group1['id'],
- role_id=self.role1['id'])
-
- up1_entity = _build_role_assignment_entity(
+ gp1_link = self.build_role_assignment_link(
+ project_id=self.project1['id'], group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ gd1_link = self.build_role_assignment_link(domain_id=self.domain_id,
+ group_id=self.group1['id'],
+ role_id=self.role1['id'])
+
+ up1_entity = self.build_role_assignment_entity(
link=gp1_link, project_id=self.project1['id'],
user_id=self.user1['id'], role_id=self.role1['id'])
- ud1_entity = _build_role_assignment_entity(
+ ud1_entity = self.build_role_assignment_entity(
link=gd1_link, domain_id=self.domain_id, user_id=self.user1['id'],
role_id=self.role1['id'])
self.assertRoleAssignmentInListResponse(r, up1_entity)
@@ -1641,7 +1778,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase):
self.assertRoleAssignmentInListResponse(r, up1_entity)
-class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase):
+class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
+ test_v3.AssignmentTestMixin):
"""Base class for testing /v3/role_assignments API behavior."""
MAX_HIERARCHY_BREADTH = 3
@@ -1665,8 +1803,8 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase):
for i in range(breadth):
subprojects.append(self.new_project_ref(
domain_id=self.domain_id, parent_id=parent_id))
- self.assignment_api.create_project(subprojects[-1]['id'],
- subprojects[-1])
+ self.resource_api.create_project(subprojects[-1]['id'],
+ subprojects[-1])
new_parent = subprojects[random.randint(0, breadth - 1)]
create_project_hierarchy(new_parent['id'], depth - 1)
@@ -1676,12 +1814,12 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase):
# Create a domain
self.domain = self.new_domain_ref()
self.domain_id = self.domain['id']
- self.assignment_api.create_domain(self.domain_id, self.domain)
+ self.resource_api.create_domain(self.domain_id, self.domain)
# Create a project hierarchy
self.project = self.new_project_ref(domain_id=self.domain_id)
self.project_id = self.project['id']
- self.assignment_api.create_project(self.project_id, self.project)
+ self.resource_api.create_project(self.project_id, self.project)
# Create a random project hierarchy
create_project_hierarchy(self.project_id,
@@ -1714,7 +1852,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase):
# Create a role
self.role = self.new_role_ref()
self.role_id = self.role['id']
- self.assignment_api.create_role(self.role_id, self.role)
+ self.role_api.create_role(self.role_id, self.role)
# Set default user and group to be used on tests
self.default_user_id = self.user_ids[0]
@@ -1748,7 +1886,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase):
:returns: role assignments query URL.
"""
- return _build_role_assignment_query_url(**filters)
+ return self.build_role_assignment_query_url(**filters)
class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
@@ -1869,7 +2007,7 @@ class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
:returns: the list of the expected role assignments.
"""
- return [_build_role_assignment_entity(**filters)]
+ return [self.build_role_assignment_entity(**filters)]
# Test cases below call the generic test method, providing different filter
# combinations. Filters are provided as specified in the method name, after
@@ -1980,8 +2118,8 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
query_filters.pop('domain_id', None)
query_filters.pop('project_id', None)
- return _build_role_assignment_query_url(effective=True,
- **query_filters)
+ return self.build_role_assignment_query_url(effective=True,
+ **query_filters)
def _list_expected_role_assignments(self, **filters):
"""Given the filters, it returns expected direct role assignments.
@@ -1995,7 +2133,7 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
"""
# Get assignment link, to be put on 'links': {'assignment': link}
- assignment_link = _build_role_assignment_link(**filters)
+ assignment_link = self.build_role_assignment_link(**filters)
# Expand group membership
user_ids = [None]
@@ -2010,11 +2148,11 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
project_ids = [None]
if filters.get('domain_id'):
project_ids = [project['id'] for project in
- self.assignment_api.list_projects_in_domain(
+ self.resource_api.list_projects_in_domain(
filters.pop('domain_id'))]
else:
project_ids = [project['id'] for project in
- self.assignment_api.list_projects_in_subtree(
+ self.resource_api.list_projects_in_subtree(
self.project_id)]
# Compute expected role assignments
@@ -2023,13 +2161,14 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
filters['project_id'] = project_id
for user_id in user_ids:
filters['user_id'] = user_id
- assignments.append(_build_role_assignment_entity(
+ assignments.append(self.build_role_assignment_entity(
link=assignment_link, **filters))
return assignments
-class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
+class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
+ test_v3.AssignmentTestMixin):
"""Test inheritance crud and its effects."""
def config_overrides(self):
@@ -2058,7 +2197,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.v3_authenticate_token(project_auth_data, expected_status=401)
# Grant non-inherited role for user on domain
- non_inher_ud_link = _build_role_assignment_link(
+ non_inher_ud_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
self.put(non_inher_ud_link)
@@ -2071,7 +2210,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
- inher_ud_link = _build_role_assignment_link(
+ inher_ud_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'],
role_id=inherited_role['id'], inherited_to_projects=True)
self.put(inher_ud_link)
@@ -2120,7 +2259,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.v3_authenticate_token(project_auth_data, expected_status=401)
# Grant non-inherited role for user on domain
- non_inher_gd_link = _build_role_assignment_link(
+ non_inher_gd_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
self.put(non_inher_gd_link)
@@ -2133,7 +2272,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
- inher_gd_link = _build_role_assignment_link(
+ inher_gd_link = self.build_role_assignment_link(
domain_id=self.domain_id, user_id=user['id'],
role_id=inherited_role['id'], inherited_to_projects=True)
self.put(inher_gd_link)
@@ -2155,6 +2294,48 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
# Check the user cannot get a domain token anymore
self.v3_authenticate_token(domain_auth_data, expected_status=401)
+ def _test_crud_inherited_and_direct_assignment_on_target(self, target_url):
+ # Create a new role to avoid assignments loaded from sample data
+ role = self.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
+ # Define URLs
+ direct_url = '%s/users/%s/roles/%s' % (
+ target_url, self.user_id, role['id'])
+ inherited_url = '/OS-INHERIT/%s/inherited_to_projects' % direct_url
+
+ # Create the direct assignment
+ self.put(direct_url)
+ # Check the direct assignment exists, but the inherited one does not
+ self.head(direct_url)
+ self.head(inherited_url, expected_status=404)
+
+ # Now add the inherited assignment
+ self.put(inherited_url)
+ # Check both the direct and inherited assignment exist
+ self.head(direct_url)
+ self.head(inherited_url)
+
+ # Delete indirect assignment
+ self.delete(inherited_url)
+ # Check the direct assignment exists, but the inherited one does not
+ self.head(direct_url)
+ self.head(inherited_url, expected_status=404)
+
+ # Now delete the inherited assignment
+ self.delete(direct_url)
+ # Check that none of them exist
+ self.head(direct_url, expected_status=404)
+ self.head(inherited_url, expected_status=404)
+
+ def test_crud_inherited_and_direct_assignment_on_domains(self):
+ self._test_crud_inherited_and_direct_assignment_on_target(
+ '/domains/%s' % self.domain_id)
+
+ def test_crud_inherited_and_direct_assignment_on_projects(self):
+ self._test_crud_inherited_and_direct_assignment_on_target(
+ '/projects/%s' % self.project_id)
+
def test_crud_user_inherited_domain_role_grants(self):
role_list = []
for _ in range(2):
@@ -2260,7 +2441,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
- ud_entity = _build_role_assignment_entity(
+ ud_entity = self.build_role_assignment_entity(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, ud_entity)
@@ -2279,14 +2460,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
# An effective role for an inherited role will be a project
# entity, with a domain link to the inherited assignment
- ud_url = _build_role_assignment_link(
+ ud_url = self.build_role_assignment_link(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
- up_entity = _build_role_assignment_entity(link=ud_url,
- project_id=project1['id'],
- user_id=user1['id'],
- role_id=role_list[3]['id'],
- inherited_to_projects=True)
+ up_entity = self.build_role_assignment_entity(
+ link=ud_url, project_id=project1['id'],
+ user_id=user1['id'], role_id=role_list[3]['id'],
+ inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
def test_list_role_assignments_for_disabled_inheritance_extension(self):
@@ -2360,14 +2540,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
expected_length=3,
resource_url=collection_url)
- ud_url = _build_role_assignment_link(
+ ud_url = self.build_role_assignment_link(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
- up_entity = _build_role_assignment_entity(link=ud_url,
- project_id=project1['id'],
- user_id=user1['id'],
- role_id=role_list[3]['id'],
- inherited_to_projects=True)
+ up_entity = self.build_role_assignment_entity(
+ link=ud_url, project_id=project1['id'],
+ user_id=user1['id'], role_id=role_list[3]['id'],
+ inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
@@ -2463,7 +2642,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
- gd_entity = _build_role_assignment_entity(
+ gd_entity = self.build_role_assignment_entity(
domain_id=domain['id'], group_id=group1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, gd_entity)
@@ -2482,7 +2661,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
resource_url=collection_url)
# An effective role for an inherited role will be a project
# entity, with a domain link to the inherited assignment
- up_entity = _build_role_assignment_entity(
+ up_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
@@ -2573,10 +2752,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
- ud_entity = _build_role_assignment_entity(
+ ud_entity = self.build_role_assignment_entity(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
- gd_entity = _build_role_assignment_entity(
+ gd_entity = self.build_role_assignment_entity(
domain_id=domain['id'], group_id=group1['id'],
role_id=role_list[4]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, ud_entity)
@@ -2626,7 +2805,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
# Grant non-inherited role for user on leaf project
- non_inher_up_link = _build_role_assignment_link(
+ non_inher_up_link = self.build_role_assignment_link(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_link)
@@ -2636,7 +2815,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.v3_authenticate_token(leaf_project_auth_data)
# Grant inherited role for user on root project
- inher_up_link = _build_role_assignment_link(
+ inher_up_link = self.build_role_assignment_link(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_link)
@@ -2683,7 +2862,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.v3_authenticate_token(leaf_project_auth_data, expected_status=401)
# Grant non-inherited role for group on leaf project
- non_inher_gp_link = _build_role_assignment_link(
+ non_inher_gp_link = self.build_role_assignment_link(
project_id=leaf_id, group_id=group['id'],
role_id=non_inherited_role_id)
self.put(non_inher_gp_link)
@@ -2693,7 +2872,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.v3_authenticate_token(leaf_project_auth_data)
# Grant inherited role for group on root project
- inher_gp_link = _build_role_assignment_link(
+ inher_gp_link = self.build_role_assignment_link(
project_id=root_id, group_id=group['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_gp_link)
@@ -2732,13 +2911,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
- non_inher_up_entity = _build_role_assignment_entity(
+ non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
- inher_up_entity = _build_role_assignment_entity(
+ inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
@@ -2756,7 +2935,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
- non_inher_up_entity = _build_role_assignment_entity(
+ non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
@@ -2784,13 +2963,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
- non_inher_up_entity = _build_role_assignment_entity(
+ non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
- inher_up_entity = _build_role_assignment_entity(
+ inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
@@ -2808,7 +2987,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
- non_inher_up_entity = _build_role_assignment_entity(
+ non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
@@ -2835,13 +3014,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
- non_inher_up_entity = _build_role_assignment_entity(
+ non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
- inher_up_entity = _build_role_assignment_entity(
+ inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
@@ -2860,7 +3039,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase):
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
- non_inher_up_entity = _build_role_assignment_entity(
+ non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
@@ -2898,11 +3077,32 @@ class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase):
class AssignmentV3toV2MethodsTestCase(tests.TestCase):
"""Test domain V3 to V2 conversion methods."""
+ def _setup_initial_projects(self):
+ self.project_id = uuid.uuid4().hex
+ self.domain_id = CONF.identity.default_domain_id
+ self.parent_id = uuid.uuid4().hex
+ # Project with only domain_id in ref
+ self.project1 = {'id': self.project_id,
+ 'name': self.project_id,
+ 'domain_id': self.domain_id}
+ # Project with both domain_id and parent_id in ref
+ self.project2 = {'id': self.project_id,
+ 'name': self.project_id,
+ 'domain_id': self.domain_id,
+ 'parent_id': self.parent_id}
+ # Project with no domain_id and parent_id in ref
+ self.project3 = {'id': self.project_id,
+ 'name': self.project_id,
+ 'domain_id': self.domain_id,
+ 'parent_id': self.parent_id}
+ # Expected result with no domain_id and parent_id
+ self.expected_project = {'id': self.project_id,
+ 'name': self.project_id}
def test_v2controller_filter_domain_id(self):
# V2.0 is not domain aware, ensure domain_id is popped off the ref.
other_data = uuid.uuid4().hex
- domain_id = uuid.uuid4().hex
+ domain_id = CONF.identity.default_domain_id
ref = {'domain_id': domain_id,
'other_data': other_data}
@@ -2941,3 +3141,52 @@ class AssignmentV3toV2MethodsTestCase(tests.TestCase):
self.assertRaises(exception.Unauthorized,
controller.V2Controller.filter_domain,
non_default_domain_ref)
+
+ def test_v2controller_filter_project_parent_id(self):
+ # V2.0 is not project hierarchy aware, ensure parent_id is popped off.
+ other_data = uuid.uuid4().hex
+ parent_id = uuid.uuid4().hex
+ ref = {'parent_id': parent_id,
+ 'other_data': other_data}
+
+ ref_no_parent = {'other_data': other_data}
+ expected_ref = ref_no_parent.copy()
+
+ updated_ref = controller.V2Controller.filter_project_parent_id(ref)
+ self.assertIs(ref, updated_ref)
+ self.assertDictEqual(ref, expected_ref)
+ # Make sure we don't error/muck up data if parent_id isn't present
+ updated_ref = controller.V2Controller.filter_project_parent_id(
+ ref_no_parent)
+ self.assertIs(ref_no_parent, updated_ref)
+ self.assertDictEqual(ref_no_parent, expected_ref)
+
+ def test_v3_to_v2_project_method(self):
+ self._setup_initial_projects()
+ updated_project1 = controller.V2Controller.v3_to_v2_project(
+ self.project1)
+ self.assertIs(self.project1, updated_project1)
+ self.assertDictEqual(self.project1, self.expected_project)
+ updated_project2 = controller.V2Controller.v3_to_v2_project(
+ self.project2)
+ self.assertIs(self.project2, updated_project2)
+ self.assertDictEqual(self.project2, self.expected_project)
+ updated_project3 = controller.V2Controller.v3_to_v2_project(
+ self.project3)
+ self.assertIs(self.project3, updated_project3)
+ self.assertDictEqual(self.project3, self.expected_project)
+
+ def test_v3_to_v2_project_method_list(self):
+ self._setup_initial_projects()
+ project_list = [self.project1, self.project2, self.project3]
+ updated_list = controller.V2Controller.v3_to_v2_project(project_list)
+
+ self.assertEqual(len(updated_list), len(project_list))
+
+ for i, ref in enumerate(updated_list):
+ # Order should not change.
+ self.assertIs(ref, project_list[i])
+
+ self.assertDictEqual(self.project1, self.expected_project)
+ self.assertDictEqual(self.project2, self.expected_project)
+ self.assertDictEqual(self.project3, self.expected_project)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_auth.py b/keystone-moon/keystone/tests/unit/test_v3_auth.py
index ec079170..96f0ff1f 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_auth.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_auth.py
@@ -22,18 +22,18 @@ from keystoneclient.common import cms
import mock
from oslo_config import cfg
from oslo_utils import timeutils
-import six
+from six.moves import range
from testtools import matchers
from testtools import testcase
from keystone import auth
+from keystone.common import utils
from keystone import exception
from keystone.policy.backends import rules
from keystone.tests import unit as tests
from keystone.tests.unit import ksfixtures
from keystone.tests.unit import test_v3
-
CONF = cfg.CONF
@@ -97,8 +97,8 @@ class TestAuthInfo(test_v3.AuthTestMixin, testcase.TestCase):
'password', 'password']
context = None
auth_info = auth.controllers.AuthInfo.create(context, auth_data)
- self.assertEqual(auth_info.get_method_names(),
- ['password', 'token'])
+ self.assertEqual(['password', 'token'],
+ auth_info.get_method_names())
def test_get_method_data_invalid_method(self):
auth_data = self.build_authentication_request(
@@ -114,276 +114,294 @@ class TestAuthInfo(test_v3.AuthTestMixin, testcase.TestCase):
class TokenAPITests(object):
- # Why is this not just setUP? Because TokenAPITests is not a test class
+ # Why is this not just setUp? Because TokenAPITests is not a test class
# itself. If TokenAPITests became a subclass of the testcase, it would get
# called by the enumerate-tests-in-file code. The way the functions get
# resolved in Python for multiple inheritance means that a setUp in this
# would get skipped by the testrunner.
def doSetUp(self):
- auth_data = self.build_authentication_request(
+ r = self.v3_authenticate_token(self.build_authentication_request(
username=self.user['name'],
user_domain_id=self.domain_id,
- password=self.user['password'])
- resp = self.v3_authenticate_token(auth_data)
- self.token_data = resp.result
- self.token = resp.headers.get('X-Subject-Token')
- self.headers = {'X-Subject-Token': resp.headers.get('X-Subject-Token')}
+ password=self.user['password']))
+ self.v3_token_data = r.result
+ self.v3_token = r.headers.get('X-Subject-Token')
+ self.headers = {'X-Subject-Token': r.headers.get('X-Subject-Token')}
def test_default_fixture_scope_token(self):
self.assertIsNotNone(self.get_scoped_token())
- def verify_token(self, *args, **kwargs):
- return cms.verify_token(*args, **kwargs)
-
- def test_v3_token_id(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- resp = self.v3_authenticate_token(auth_data)
- token_data = resp.result
- token_id = resp.headers.get('X-Subject-Token')
- self.assertIn('expires_at', token_data['token'])
-
- decoded_token = self.verify_token(token_id, CONF.signing.certfile,
- CONF.signing.ca_certs)
- decoded_token_dict = json.loads(decoded_token)
-
- token_resp_dict = json.loads(resp.body)
-
- self.assertEqual(decoded_token_dict, token_resp_dict)
- # should be able to validate hash PKI token as well
- hash_token_id = cms.cms_hash_token(token_id)
- headers = {'X-Subject-Token': hash_token_id}
- resp = self.get('/auth/tokens', headers=headers)
- expected_token_data = resp.result
- self.assertDictEqual(expected_token_data, token_data)
-
def test_v3_v2_intermix_non_default_domain_failed(self):
- auth_data = self.build_authentication_request(
+ v3_token = self.get_requested_token(self.build_authentication_request(
user_id=self.user['id'],
- password=self.user['password'])
- token = self.get_requested_token(auth_data)
+ password=self.user['password']))
# now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(path=path,
- token='ADMIN',
- method='GET',
- expected_status=401)
+ self.admin_request(
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ method='GET',
+ expected_status=401)
def test_v3_v2_intermix_new_default_domain(self):
# If the default_domain_id config option is changed, then should be
# able to validate a v3 token with user in the new domain.
# 1) Create a new domain for the user.
- new_domain_id = uuid.uuid4().hex
new_domain = {
'description': uuid.uuid4().hex,
'enabled': True,
- 'id': new_domain_id,
+ 'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
}
-
- self.resource_api.create_domain(new_domain_id, new_domain)
+ self.resource_api.create_domain(new_domain['id'], new_domain)
# 2) Create user in new domain.
new_user_password = uuid.uuid4().hex
new_user = {
'name': uuid.uuid4().hex,
- 'domain_id': new_domain_id,
+ 'domain_id': new_domain['id'],
'password': new_user_password,
'email': uuid.uuid4().hex,
}
-
new_user = self.identity_api.create_user(new_user)
# 3) Update the default_domain_id config option to the new domain
+ self.config_fixture.config(
+ group='identity',
+ default_domain_id=new_domain['id'])
- self.config_fixture.config(group='identity',
- default_domain_id=new_domain_id)
-
- # 4) Get a token using v3 api.
-
- auth_data = self.build_authentication_request(
+ # 4) Get a token using v3 API.
+ v3_token = self.get_requested_token(self.build_authentication_request(
user_id=new_user['id'],
- password=new_user_password)
- token = self.get_requested_token(auth_data)
+ password=new_user_password))
- # 5) Authenticate token using v2 api.
-
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(path=path,
- token='ADMIN',
- method='GET')
+ # 5) Validate token using v2 API.
+ self.admin_request(
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ method='GET')
def test_v3_v2_intermix_domain_scoped_token_failed(self):
# grant the domain role to user
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
- auth_data = self.build_authentication_request(
+ self.put(
+ path='/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id']))
+
+ # generate a domain-scoped v3 token
+ v3_token = self.get_requested_token(self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
- domain_id=self.domain['id'])
- token = self.get_requested_token(auth_data)
+ domain_id=self.domain['id']))
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(path=path,
- token='ADMIN',
- method='GET',
- expected_status=401)
+ # domain-scoped tokens are not supported by v2
+ self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ expected_status=401)
def test_v3_v2_intermix_non_default_project_failed(self):
- auth_data = self.build_authentication_request(
+ # self.project is in a non-default domain
+ v3_token = self.get_requested_token(self.build_authentication_request(
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password'],
- project_id=self.project['id'])
- token = self.get_requested_token(auth_data)
+ project_id=self.project['id']))
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(path=path,
- token='ADMIN',
- method='GET',
- expected_status=401)
+ # v2 cannot reference projects outside the default domain
+ self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ expected_status=401)
+
+ def test_v3_v2_intermix_non_default_user_failed(self):
+ self.assignment_api.create_grant(
+ self.role['id'],
+ user_id=self.user['id'],
+ project_id=self.default_domain_project['id'])
+
+ # self.user is in a non-default domain
+ v3_token = self.get_requested_token(self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.default_domain_project['id']))
+
+ # v2 cannot reference projects outside the default domain
+ self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ expected_status=401)
+
+ def test_v3_v2_intermix_domain_scope_failed(self):
+ self.assignment_api.create_grant(
+ self.role['id'],
+ user_id=self.default_domain_user['id'],
+ domain_id=self.domain['id'])
+
+ v3_token = self.get_requested_token(self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ domain_id=self.domain['id']))
+
+ # v2 cannot reference projects outside the default domain
+ self.admin_request(
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ method='GET',
+ expected_status=401)
def test_v3_v2_unscoped_token_intermix(self):
- auth_data = self.build_authentication_request(
+ r = self.v3_authenticate_token(self.build_authentication_request(
user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'])
- resp = self.v3_authenticate_token(auth_data)
- token_data = resp.result
- token = resp.headers.get('X-Subject-Token')
+ password=self.default_domain_user['password']))
+ self.assertValidUnscopedTokenResponse(r)
+ v3_token_data = r.result
+ v3_token = r.headers.get('X-Subject-Token')
# now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- resp = self.admin_request(path=path,
- token='ADMIN',
- method='GET')
- v2_token = resp.result
- self.assertEqual(v2_token['access']['user']['id'],
- token_data['token']['user']['id'])
+ r = self.admin_request(
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token,
+ method='GET')
+ v2_token_data = r.result
+
+ self.assertEqual(v2_token_data['access']['user']['id'],
+ v3_token_data['token']['user']['id'])
# v2 token time has not fraction of second precision so
# just need to make sure the non fraction part agrees
- self.assertIn(v2_token['access']['token']['expires'][:-1],
- token_data['token']['expires_at'])
+ self.assertIn(v2_token_data['access']['token']['expires'][:-1],
+ v3_token_data['token']['expires_at'])
def test_v3_v2_token_intermix(self):
# FIXME(gyee): PKI tokens are not interchangeable because token
# data is baked into the token itself.
- auth_data = self.build_authentication_request(
+ r = self.v3_authenticate_token(self.build_authentication_request(
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password'],
- project_id=self.default_domain_project['id'])
- resp = self.v3_authenticate_token(auth_data)
- token_data = resp.result
- token = resp.headers.get('X-Subject-Token')
+ project_id=self.default_domain_project['id']))
+ self.assertValidProjectScopedTokenResponse(r)
+ v3_token_data = r.result
+ v3_token = r.headers.get('X-Subject-Token')
# now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- resp = self.admin_request(path=path,
- token='ADMIN',
- method='GET')
- v2_token = resp.result
- self.assertEqual(v2_token['access']['user']['id'],
- token_data['token']['user']['id'])
- # v2 token time has not fraction of second precision so
- # just need to make sure the non fraction part agrees
- self.assertIn(v2_token['access']['token']['expires'][:-1],
- token_data['token']['expires_at'])
- self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
- token_data['token']['roles'][0]['id'])
+ r = self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/%s' % v3_token,
+ token=CONF.admin_token)
+ v2_token_data = r.result
- def test_v3_v2_hashed_pki_token_intermix(self):
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project['id'])
- resp = self.v3_authenticate_token(auth_data)
- token_data = resp.result
- token = resp.headers.get('X-Subject-Token')
-
- # should be able to validate a hash PKI token in v2 too
- token = cms.cms_hash_token(token)
- path = '/v2.0/tokens/%s' % (token)
- resp = self.admin_request(path=path,
- token='ADMIN',
- method='GET')
- v2_token = resp.result
- self.assertEqual(v2_token['access']['user']['id'],
- token_data['token']['user']['id'])
+ self.assertEqual(v2_token_data['access']['user']['id'],
+ v3_token_data['token']['user']['id'])
# v2 token time has not fraction of second precision so
# just need to make sure the non fraction part agrees
- self.assertIn(v2_token['access']['token']['expires'][:-1],
- token_data['token']['expires_at'])
- self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
- token_data['token']['roles'][0]['id'])
+ self.assertIn(v2_token_data['access']['token']['expires'][:-1],
+ v3_token_data['token']['expires_at'])
+ self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'],
+ v3_token_data['token']['roles'][0]['name'])
def test_v2_v3_unscoped_token_intermix(self):
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.user['id'],
- 'password': self.user['password']
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.default_domain_user['id'],
+ 'password': self.default_domain_user['password']
+ }
}
- }}
- resp = self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body)
- v2_token_data = resp.result
+ })
+ v2_token_data = r.result
v2_token = v2_token_data['access']['token']['id']
- headers = {'X-Subject-Token': v2_token}
- resp = self.get('/auth/tokens', headers=headers)
- token_data = resp.result
+
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token})
+ # FIXME(dolph): Due to bug 1476329, v2 tokens validated on v3 are
+ # missing timezones, so they will not pass this assertion.
+ # self.assertValidUnscopedTokenResponse(r)
+ v3_token_data = r.result
+
self.assertEqual(v2_token_data['access']['user']['id'],
- token_data['token']['user']['id'])
+ v3_token_data['token']['user']['id'])
# v2 token time has not fraction of second precision so
# just need to make sure the non fraction part agrees
self.assertIn(v2_token_data['access']['token']['expires'][-1],
- token_data['token']['expires_at'])
+ v3_token_data['token']['expires_at'])
def test_v2_v3_token_intermix(self):
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.user['id'],
- 'password': self.user['password']
- },
- 'tenantId': self.project['id']
- }}
- resp = self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body)
- v2_token_data = resp.result
+ r = self.admin_request(
+ path='/v2.0/tokens',
+ method='POST',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.default_domain_user['id'],
+ 'password': self.default_domain_user['password']
+ },
+ 'tenantId': self.default_domain_project['id']
+ }
+ })
+ v2_token_data = r.result
v2_token = v2_token_data['access']['token']['id']
- headers = {'X-Subject-Token': v2_token}
- resp = self.get('/auth/tokens', headers=headers)
- token_data = resp.result
+
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token})
+ # FIXME(dolph): Due to bug 1476329, v2 tokens validated on v3 are
+ # missing timezones, so they will not pass this assertion.
+ # self.assertValidProjectScopedTokenResponse(r)
+ v3_token_data = r.result
+
self.assertEqual(v2_token_data['access']['user']['id'],
- token_data['token']['user']['id'])
+ v3_token_data['token']['user']['id'])
# v2 token time has not fraction of second precision so
# just need to make sure the non fraction part agrees
self.assertIn(v2_token_data['access']['token']['expires'][-1],
- token_data['token']['expires_at'])
+ v3_token_data['token']['expires_at'])
self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'],
- token_data['token']['roles'][0]['name'])
+ v3_token_data['token']['roles'][0]['name'])
v2_issued_at = timeutils.parse_isotime(
v2_token_data['access']['token']['issued_at'])
v3_issued_at = timeutils.parse_isotime(
- token_data['token']['issued_at'])
+ v3_token_data['token']['issued_at'])
self.assertEqual(v2_issued_at, v3_issued_at)
+ def test_v2_token_deleted_on_v3(self):
+ # Create a v2 token.
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.default_domain_user['id'],
+ 'password': self.default_domain_user['password']
+ },
+ 'tenantId': self.default_domain_project['id']
+ }
+ }
+ r = self.admin_request(
+ path='/v2.0/tokens', method='POST', body=body)
+ v2_token = r.result['access']['token']['id']
+
+ # Delete the v2 token using v3.
+ resp = self.delete(
+ '/auth/tokens', headers={'X-Subject-Token': v2_token})
+ self.assertEqual(resp.status_code, 204)
+
+ # Attempting to use the deleted token on v2 should fail.
+ self.admin_request(
+ path='/v2.0/tenants', method='GET', token=v2_token,
+ expected_status=401)
+
def test_rescoping_token(self):
- expires = self.token_data['token']['expires_at']
- auth_data = self.build_authentication_request(
- token=self.token,
- project_id=self.project_id)
- r = self.v3_authenticate_token(auth_data)
+ expires = self.v3_token_data['token']['expires_at']
+
+ # rescope the token
+ r = self.v3_authenticate_token(self.build_authentication_request(
+ token=self.v3_token,
+ project_id=self.project_id))
self.assertValidProjectScopedTokenResponse(r)
- # make sure expires stayed the same
+
+ # ensure token expiration stayed the same
self.assertEqual(expires, r.result['token']['expires_at'])
def test_check_token(self):
@@ -394,12 +412,13 @@ class TokenAPITests(object):
self.assertValidUnscopedTokenResponse(r)
def test_validate_token_nocatalog(self):
- auth_data = self.build_authentication_request(
+ v3_token = self.get_requested_token(self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
- project_id=self.project['id'])
- headers = {'X-Subject-Token': self.get_requested_token(auth_data)}
- r = self.get('/auth/tokens?nocatalog', headers=headers)
+ project_id=self.project['id']))
+ r = self.get(
+ '/auth/tokens?nocatalog',
+ headers={'X-Subject-Token': v3_token})
self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
@@ -420,10 +439,10 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
def _v2_token(self):
body = {
'auth': {
- "tenantId": self.project['id'],
+ "tenantId": self.default_domain_project['id'],
'passwordCredentials': {
- 'userId': self.user['id'],
- 'password': self.user['password']
+ 'userId': self.default_domain_user['id'],
+ 'password': self.default_domain_user['password']
}
}}
resp = self.admin_request(path='/v2.0/tokens',
@@ -462,7 +481,7 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
def test_rescoped_domain_token_disabled(self):
self.domainA = self.new_domain_ref()
- self.assignment_api.create_domain(self.domainA['id'], self.domainA)
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
self.assignment_api.create_grant(self.role['id'],
user_id=self.user['id'],
domain_id=self.domainA['id'])
@@ -485,37 +504,77 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
def config_overrides(self):
super(TestPKITokenAPIs, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.pki.Provider')
+ self.config_fixture.config(group='token', provider='pki')
def setUp(self):
super(TestPKITokenAPIs, self).setUp()
self.doSetUp()
+ def verify_token(self, *args, **kwargs):
+ return cms.verify_token(*args, **kwargs)
-class TestPKIZTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+ def test_v3_token_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token_id = resp.headers.get('X-Subject-Token')
+ self.assertIn('expires_at', token_data['token'])
- def verify_token(self, *args, **kwargs):
- return cms.pkiz_verify(*args, **kwargs)
+ decoded_token = self.verify_token(token_id, CONF.signing.certfile,
+ CONF.signing.ca_certs)
+ decoded_token_dict = json.loads(decoded_token)
+
+ token_resp_dict = json.loads(resp.body)
+
+ self.assertEqual(decoded_token_dict, token_resp_dict)
+ # should be able to validate hash PKI token as well
+ hash_token_id = cms.cms_hash_token(token_id)
+ headers = {'X-Subject-Token': hash_token_id}
+ resp = self.get('/auth/tokens', headers=headers)
+ expected_token_data = resp.result
+ self.assertDictEqual(expected_token_data, token_data)
+ def test_v3_v2_hashed_pki_token_intermix(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ resp = self.v3_authenticate_token(auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # should be able to validate a hash PKI token in v2 too
+ token = cms.cms_hash_token(token)
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token=CONF.admin_token,
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+ # v2 token time has not fraction of second precision so
+ # just need to make sure the non fraction part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+ self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
+ token_data['token']['roles'][0]['id'])
+
+
class TestPKIZTokenAPIs(TestPKITokenAPIs):
    """Re-run the PKI token API tests against the pkiz provider."""

    def config_overrides(self):
        super(TestPKIZTokenAPIs, self).config_overrides()
        self.config_fixture.config(group='token', provider='pkiz')

    def verify_token(self, *args, **kwargs):
        # pkiz tokens use compressed verification
        return cms.pkiz_verify(*args, **kwargs)
class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
def config_overrides(self):
super(TestUUIDTokenAPIs, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.uuid.Provider')
+ self.config_fixture.config(group='token', provider='uuid')
def setUp(self):
super(TestUUIDTokenAPIs, self).setUp()
@@ -531,10 +590,16 @@ class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
self.assertIn('expires_at', token_data['token'])
self.assertFalse(cms.is_asn1_token(token_id))
- def test_v3_v2_hashed_pki_token_intermix(self):
- # this test is only applicable for PKI tokens
- # skipping it for UUID tokens
- pass
+
class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
    """Run the shared token API tests against the fernet provider."""

    def config_overrides(self):
        super(TestFernetTokenAPIs, self).config_overrides()
        self.config_fixture.config(group='token', provider='fernet')
        # fernet requires a key repository on disk
        self.useFixture(ksfixtures.KeyRepository(self.config_fixture))

    def setUp(self):
        super(TestFernetTokenAPIs, self).setUp()
        self.doSetUp()
class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
@@ -675,12 +740,10 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
def config_overrides(self):
super(TestTokenRevokeById, self).config_overrides()
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
- provider='keystone.token.providers.pki.Provider',
+ provider='pki',
revoke_by_id=False)
def setUp(self):
@@ -1069,7 +1132,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
- Delete the grant group1 has on ProjectA
- Check tokens for user1 & user2 are no longer valid,
since user1 and user2 are members of group1
- - Check token for user3 is still valid
+ - Check token for user3 is invalid too
"""
auth_data = self.build_authentication_request(
@@ -1112,10 +1175,11 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.head('/auth/tokens',
headers={'X-Subject-Token': token2},
expected_status=404)
- # But user3's token should still be valid
+ # But user3's token should be invalid too as revocation is done for
+ # scope role & project
self.head('/auth/tokens',
headers={'X-Subject-Token': token3},
- expected_status=200)
+ expected_status=404)
def test_domain_group_role_assignment_maintains_token(self):
"""Test domain-group role assignment maintains existing token.
@@ -1202,6 +1266,14 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
def test_removing_role_assignment_does_not_affect_other_users(self):
"""Revoking a role from one user should not affect other users."""
+
+ # This group grant is not needed for the test
+ self.delete(
+ '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
+ {'project_id': self.projectA['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+
user1_token = self.get_requested_token(
self.build_authentication_request(
user_id=self.user1['id'],
@@ -1220,12 +1292,6 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
'project_id': self.projectA['id'],
'user_id': self.user1['id'],
'role_id': self.role1['id']})
- self.delete(
- '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
- {'project_id': self.projectA['id'],
- 'group_id': self.group1['id'],
- 'role_id': self.role1['id']})
-
# authorization for the first user should now fail
self.head('/auth/tokens',
headers={'X-Subject-Token': user1_token},
@@ -1384,6 +1450,58 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
expected_status=200)
class TestTokenRevokeByAssignment(TestTokenRevokeById):
    """Revocation-by-id variant of the revoke tests."""

    def config_overrides(self):
        # NOTE(review): super() is deliberately anchored at
        # TestTokenRevokeById, skipping its own config_overrides (which set
        # provider='pki' and revoke_by_id=False) — confirm this is intended.
        super(TestTokenRevokeById, self).config_overrides()
        self.config_fixture.config(
            group='revoke',
            driver='kvs')
        self.config_fixture.config(
            group='token',
            provider='uuid',
            revoke_by_id=True)

    def test_removing_role_assignment_keeps_other_project_token_groups(self):
        """Test assignment isolation.

        Revoking a group role from one project should not invalidate all group
        users' tokens
        """
        self.assignment_api.create_grant(self.role1['id'],
                                         group_id=self.group1['id'],
                                         project_id=self.projectB['id'])

        token_b = self.get_requested_token(
            self.build_authentication_request(
                user_id=self.user1['id'],
                password=self.user1['password'],
                project_id=self.projectB['id']))

        token_a = self.get_requested_token(
            self.build_authentication_request(
                user_id=self.user1['id'],
                password=self.user1['password'],
                project_id=self.projectA['id']))

        self.assignment_api.delete_grant(self.role1['id'],
                                         group_id=self.group1['id'],
                                         project_id=self.projectB['id'])

        # authorization for the projectA should still succeed
        self.head('/auth/tokens',
                  headers={'X-Subject-Token': token_a},
                  expected_status=200)
        # while token for the projectB should not
        self.head('/auth/tokens',
                  headers={'X-Subject-Token': token_b},
                  expected_status=404)
        revoked = [t['id']
                   for t in self.token_provider_api.list_revoked_tokens()]
        # token is in token revocation list
        self.assertIn(token_b, revoked)
+
class TestTokenRevokeApi(TestTokenRevokeById):
EXTENSION_NAME = 'revoke'
EXTENSION_TO_ADD = 'revoke_extension'
@@ -1391,12 +1509,10 @@ class TestTokenRevokeApi(TestTokenRevokeById):
"""Test token revocation on the v3 Identity API."""
def config_overrides(self):
super(TestTokenRevokeApi, self).config_overrides()
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
- provider='keystone.token.providers.pki.Provider',
+ provider='pki',
revoke_by_id=False)
def assertValidDeletedProjectResponse(self, events_response, project_id):
@@ -1424,7 +1540,7 @@ class TestTokenRevokeApi(TestTokenRevokeById):
def assertValidRevokedTokenResponse(self, events_response, **kwargs):
events = events_response['events']
self.assertEqual(1, len(events))
- for k, v in six.iteritems(kwargs):
+ for k, v in kwargs.items():
self.assertEqual(v, events[0].get(k))
self.assertIsNotNone(events[0]['issued_before'])
self.assertIsNotNone(events_response['links'])
@@ -1494,7 +1610,7 @@ class TestTokenRevokeApi(TestTokenRevokeById):
def assertEventDataInList(self, events, **kwargs):
found = False
for e in events:
- for key, value in six.iteritems(kwargs):
+ for key, value in kwargs.items():
try:
if e[key] != value:
break
@@ -1512,8 +1628,7 @@ class TestTokenRevokeApi(TestTokenRevokeById):
'find event with key-value pairs. Expected: '
'"%(expected)s" Events: "%(events)s"' %
{'expected': ','.join(
- ["'%s=%s'" % (k, v) for k, v in six.iteritems(
- kwargs)]),
+ ["'%s=%s'" % (k, v) for k, v in kwargs.items()]),
'events': events})
def test_list_delete_token_shows_in_event_list(self):
@@ -1569,8 +1684,8 @@ class TestTokenRevokeApi(TestTokenRevokeById):
expected_status=200).json_body['events']
self.assertEqual(2, len(events))
- future = timeutils.isotime(timeutils.utcnow() +
- datetime.timedelta(seconds=1000))
+ future = utils.isotime(timeutils.utcnow() +
+ datetime.timedelta(seconds=1000))
events = self.get('/OS-REVOKE/events?since=%s' % (future),
expected_status=200).json_body['events']
@@ -1596,148 +1711,116 @@ class TestAuthExternalDisabled(test_v3.RestfulTestCase):
auth_context)
-class TestAuthExternalLegacyDefaultDomain(test_v3.RestfulTestCase):
- content_type = 'json'
-
- def config_overrides(self):
- super(TestAuthExternalLegacyDefaultDomain, self).config_overrides()
- self.auth_plugin_config_override(
- methods=['external', 'password', 'token'],
- external='keystone.auth.plugins.external.LegacyDefaultDomain',
- password='keystone.auth.plugins.password.Password',
- token='keystone.auth.plugins.token.Token')
-
- def test_remote_user_no_realm(self):
- self.config_fixture.config(group='auth', methods='external')
- api = auth.controllers.Auth()
- context, auth_info, auth_context = self.build_external_auth_request(
- self.default_domain_user['name'])
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'],
- self.default_domain_user['id'])
-
- def test_remote_user_no_domain(self):
- api = auth.controllers.Auth()
- context, auth_info, auth_context = self.build_external_auth_request(
- self.user['name'])
- self.assertRaises(exception.Unauthorized,
- api.authenticate,
- context,
- auth_info,
- auth_context)
-
-
-class TestAuthExternalLegacyDomain(test_v3.RestfulTestCase):
+class TestAuthExternalDomain(test_v3.RestfulTestCase):
content_type = 'json'
def config_overrides(self):
- super(TestAuthExternalLegacyDomain, self).config_overrides()
- self.auth_plugin_config_override(
- methods=['external', 'password', 'token'],
- external='keystone.auth.plugins.external.LegacyDomain',
- password='keystone.auth.plugins.password.Password',
- token='keystone.auth.plugins.token.Token')
+ super(TestAuthExternalDomain, self).config_overrides()
+ self.kerberos = False
+ self.auth_plugin_config_override(external='Domain')
def test_remote_user_with_realm(self):
api = auth.controllers.Auth()
- remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ remote_user = self.user['name']
+ remote_domain = self.domain['name']
context, auth_info, auth_context = self.build_external_auth_request(
- remote_user)
+ remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'], self.user['id'])
+ self.assertEqual(self.user['id'], auth_context['user_id'])
# Now test to make sure the user name can, itself, contain the
# '@' character.
user = {'name': 'myname@mydivision'}
self.identity_api.update_user(self.user['id'], user)
- remote_user = '%s@%s' % (user['name'], self.domain['name'])
+ remote_user = user['name']
context, auth_info, auth_context = self.build_external_auth_request(
- remote_user)
+ remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'], self.user['id'])
+ self.assertEqual(self.user['id'], auth_context['user_id'])
def test_project_id_scoped_with_remote_user(self):
self.config_fixture.config(group='token', bind=['kerberos'])
auth_data = self.build_authentication_request(
- project_id=self.project['id'])
- remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ project_id=self.project['id'],
+ kerberos=self.kerberos)
+ remote_user = self.user['name']
+ remote_domain = self.domain['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'REMOTE_DOMAIN': remote_domain,
'AUTH_TYPE': 'Negotiate'})
r = self.v3_authenticate_token(auth_data)
token = self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(token['bind']['kerberos'], self.user['name'])
+ self.assertEqual(self.user['name'], token['bind']['kerberos'])
def test_unscoped_bind_with_remote_user(self):
self.config_fixture.config(group='token', bind=['kerberos'])
- auth_data = self.build_authentication_request()
- remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ auth_data = self.build_authentication_request(kerberos=self.kerberos)
+ remote_user = self.user['name']
+ remote_domain = self.domain['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'REMOTE_DOMAIN': remote_domain,
'AUTH_TYPE': 'Negotiate'})
r = self.v3_authenticate_token(auth_data)
token = self.assertValidUnscopedTokenResponse(r)
- self.assertEqual(token['bind']['kerberos'], self.user['name'])
+ self.assertEqual(self.user['name'], token['bind']['kerberos'])
-class TestAuthExternalDomain(test_v3.RestfulTestCase):
+class TestAuthExternalDefaultDomain(test_v3.RestfulTestCase):
content_type = 'json'
def config_overrides(self):
- super(TestAuthExternalDomain, self).config_overrides()
+ super(TestAuthExternalDefaultDomain, self).config_overrides()
self.kerberos = False
self.auth_plugin_config_override(
- methods=['external', 'password', 'token'],
- external='keystone.auth.plugins.external.Domain',
- password='keystone.auth.plugins.password.Password',
- token='keystone.auth.plugins.token.Token')
+ external='keystone.auth.plugins.external.DefaultDomain')
- def test_remote_user_with_realm(self):
+ def test_remote_user_with_default_domain(self):
api = auth.controllers.Auth()
- remote_user = self.user['name']
- remote_domain = self.domain['name']
+ remote_user = self.default_domain_user['name']
context, auth_info, auth_context = self.build_external_auth_request(
- remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
+ remote_user, kerberos=self.kerberos)
api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'], self.user['id'])
+ self.assertEqual(self.default_domain_user['id'],
+ auth_context['user_id'])
# Now test to make sure the user name can, itself, contain the
# '@' character.
user = {'name': 'myname@mydivision'}
- self.identity_api.update_user(self.user['id'], user)
+ self.identity_api.update_user(self.default_domain_user['id'], user)
remote_user = user['name']
context, auth_info, auth_context = self.build_external_auth_request(
- remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
+ remote_user, kerberos=self.kerberos)
api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'], self.user['id'])
+ self.assertEqual(self.default_domain_user['id'],
+ auth_context['user_id'])
def test_project_id_scoped_with_remote_user(self):
self.config_fixture.config(group='token', bind=['kerberos'])
auth_data = self.build_authentication_request(
- project_id=self.project['id'],
+ project_id=self.default_domain_project['id'],
kerberos=self.kerberos)
- remote_user = self.user['name']
- remote_domain = self.domain['name']
+ remote_user = self.default_domain_user['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'REMOTE_DOMAIN': remote_domain,
'AUTH_TYPE': 'Negotiate'})
r = self.v3_authenticate_token(auth_data)
token = self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(token['bind']['kerberos'], self.user['name'])
+ self.assertEqual(self.default_domain_user['name'],
+ token['bind']['kerberos'])
def test_unscoped_bind_with_remote_user(self):
self.config_fixture.config(group='token', bind=['kerberos'])
auth_data = self.build_authentication_request(kerberos=self.kerberos)
- remote_user = self.user['name']
- remote_domain = self.domain['name']
+ remote_user = self.default_domain_user['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'REMOTE_DOMAIN': remote_domain,
'AUTH_TYPE': 'Negotiate'})
r = self.v3_authenticate_token(auth_data)
token = self.assertValidUnscopedTokenResponse(r)
- self.assertEqual(token['bind']['kerberos'], self.user['name'])
+ self.assertEqual(self.default_domain_user['name'],
+ token['bind']['kerberos'])
class TestAuthKerberos(TestAuthExternalDomain):
@@ -1746,10 +1829,7 @@ class TestAuthKerberos(TestAuthExternalDomain):
super(TestAuthKerberos, self).config_overrides()
self.kerberos = True
self.auth_plugin_config_override(
- methods=['kerberos', 'password', 'token'],
- kerberos='keystone.auth.plugins.external.KerberosDomain',
- password='keystone.auth.plugins.password.Password',
- token='keystone.auth.plugins.token.Token')
+ methods=['kerberos', 'password', 'token'])
class TestAuth(test_v3.RestfulTestCase):
@@ -1815,7 +1895,7 @@ class TestAuth(test_v3.RestfulTestCase):
password=self.user['password'])
r = self.v3_authenticate_token(auth_data)
self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(r.result['token']['project']['id'], project['id'])
+ self.assertEqual(project['id'], r.result['token']['project']['id'])
def test_default_project_id_scoped_token_with_user_id_no_catalog(self):
project = self._second_project_as_default()
@@ -1826,7 +1906,7 @@ class TestAuth(test_v3.RestfulTestCase):
password=self.user['password'])
r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True)
self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
- self.assertEqual(r.result['token']['project']['id'], project['id'])
+ self.assertEqual(project['id'], r.result['token']['project']['id'])
def test_explicit_unscoped_token(self):
self._second_project_as_default()
@@ -1850,8 +1930,8 @@ class TestAuth(test_v3.RestfulTestCase):
project_id=self.project['id'])
r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True)
self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
+ self.assertEqual(self.project['id'],
+ r.result['token']['project']['id'])
def test_auth_catalog_attributes(self):
auth_data = self.build_authentication_request(
@@ -2345,13 +2425,12 @@ class TestAuth(test_v3.RestfulTestCase):
self.v3_authenticate_token(auth_data, expected_status=401)
def test_remote_user_no_realm(self):
- CONF.auth.methods = 'external'
api = auth.controllers.Auth()
context, auth_info, auth_context = self.build_external_auth_request(
self.default_domain_user['name'])
api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'],
- self.default_domain_user['id'])
+ self.assertEqual(self.default_domain_user['id'],
+ auth_context['user_id'])
# Now test to make sure the user name can, itself, contain the
# '@' character.
user = {'name': 'myname@mydivision'}
@@ -2359,8 +2438,8 @@ class TestAuth(test_v3.RestfulTestCase):
context, auth_info, auth_context = self.build_external_auth_request(
user["name"])
api.authenticate(context, auth_info, auth_context)
- self.assertEqual(auth_context['user_id'],
- self.default_domain_user['id'])
+ self.assertEqual(self.default_domain_user['id'],
+ auth_context['user_id'])
def test_remote_user_no_domain(self):
api = auth.controllers.Auth()
@@ -2441,8 +2520,8 @@ class TestAuth(test_v3.RestfulTestCase):
headers = {'X-Subject-Token': token}
r = self.get('/auth/tokens', headers=headers, token=token)
token = self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(token['bind']['kerberos'],
- self.default_domain_user['name'])
+ self.assertEqual(self.default_domain_user['name'],
+ token['bind']['kerberos'])
def test_auth_with_bind_token(self):
self.config_fixture.config(group='token', bind=['kerberos'])
@@ -2455,7 +2534,7 @@ class TestAuth(test_v3.RestfulTestCase):
# the unscoped token should have bind information in it
token = self.assertValidUnscopedTokenResponse(r)
- self.assertEqual(token['bind']['kerberos'], remote_user)
+ self.assertEqual(remote_user, token['bind']['kerberos'])
token = r.headers.get('X-Subject-Token')
@@ -2466,7 +2545,7 @@ class TestAuth(test_v3.RestfulTestCase):
token = self.assertValidProjectScopedTokenResponse(r)
# the bind information should be carried over from the original token
- self.assertEqual(token['bind']['kerberos'], remote_user)
+ self.assertEqual(remote_user, token['bind']['kerberos'])
def test_v2_v3_bind_token_intermix(self):
self.config_fixture.config(group='token', bind='kerberos')
@@ -2484,7 +2563,7 @@ class TestAuth(test_v3.RestfulTestCase):
v2_token_data = resp.result
bind = v2_token_data['access']['token']['bind']
- self.assertEqual(bind['kerberos'], self.default_domain_user['name'])
+ self.assertEqual(self.default_domain_user['name'], bind['kerberos'])
v2_token_id = v2_token_data['access']['token']['id']
# NOTE(gyee): self.get() will try to obtain an auth token if one
@@ -2613,12 +2692,8 @@ class TestAuth(test_v3.RestfulTestCase):
class TestAuthJSONExternal(test_v3.RestfulTestCase):
content_type = 'json'
- def config_overrides(self):
- super(TestAuthJSONExternal, self).config_overrides()
- self.config_fixture.config(group='auth', methods='')
-
def auth_plugin_config_override(self, methods=None, **method_classes):
- self.config_fixture.config(group='auth', methods='')
+ self.config_fixture.config(group='auth', methods=[])
def test_remote_user_no_method(self):
api = auth.controllers.Auth()
@@ -2787,12 +2862,12 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
self.post('/OS-TRUST/trusts',
body={'trust': self.chained_trust_ref},
token=trust_token,
- expected_status=403)
+ expected_status=400)
def test_roles_subset(self):
# Build second role
role = self.new_role_ref()
- self.assignment_api.create_role(role['id'], role)
+ self.role_api.create_role(role['id'], role)
# assign a new role to the user
self.assignment_api.create_grant(role_id=role['id'],
user_id=self.user_id,
@@ -2860,7 +2935,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
# Build second trust with a role not in parent's roles
role = self.new_role_ref()
- self.assignment_api.create_role(role['id'], role)
+ self.role_api.create_role(role['id'], role)
# assign a new role to the user
self.assignment_api.create_grant(role_id=role['id'],
user_id=self.user_id,
@@ -2895,7 +2970,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
# Check that allow_redelegation == False caused redelegation_count
# to be set to 0, while allow_redelegation is removed
self.assertNotIn('allow_redelegation', trust)
- self.assertEqual(trust['redelegation_count'], 0)
+ self.assertEqual(0, trust['redelegation_count'])
trust_token = self._get_trust_token(trust)
# Build third trust, same as second
@@ -2921,7 +2996,7 @@ class TestTrustChain(test_v3.RestfulTestCase):
# Create trust chain
self.user_chain = list()
self.trust_chain = list()
- for _ in xrange(3):
+ for _ in range(3):
user_ref = self.new_user_ref(domain_id=self.domain_id)
user = self.identity_api.create_user(user_ref)
user['password'] = user_ref['password']
@@ -3067,12 +3142,10 @@ class TestTrustAuth(test_v3.RestfulTestCase):
def config_overrides(self):
super(TestTrustAuth, self).config_overrides()
- self.config_fixture.config(
- group='revoke',
- driver='keystone.contrib.revoke.backends.kvs.Revoke')
+ self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
- provider='keystone.token.providers.pki.Provider',
+ provider='pki',
revoke_by_id=False)
self.config_fixture.config(group='trust', enabled=True)
@@ -3139,7 +3212,7 @@ class TestTrustAuth(test_v3.RestfulTestCase):
expected_status=200)
trust = r.result.get('trust')
self.assertIsNotNone(trust)
- self.assertEqual(trust['remaining_uses'], 1)
+ self.assertEqual(1, trust['remaining_uses'])
def test_create_one_time_use_trust(self):
trust = self._initialize_test_consume_trust(1)
@@ -3320,26 +3393,6 @@ class TestTrustAuth(test_v3.RestfulTestCase):
role_names=[uuid.uuid4().hex])
self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
- def test_create_expired_trust(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- expires=dict(seconds=-1),
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r, ref)
-
- self.get('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']},
- expected_status=404)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_authenticate_token(auth_data, expected_status=401)
-
def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self):
ref = self.new_trust_ref(
trustor_user_id=self.user_id,
@@ -3365,7 +3418,8 @@ class TestTrustAuth(test_v3.RestfulTestCase):
# now validate the v3 token with v2 API
path = '/v2.0/tokens/%s' % (token)
self.admin_request(
- path=path, token='ADMIN', method='GET', expected_status=401)
+ path=path, token=CONF.admin_token,
+ method='GET', expected_status=401)
def test_v3_v2_intermix_trustor_not_in_default_domaini_failed(self):
ref = self.new_trust_ref(
@@ -3397,7 +3451,8 @@ class TestTrustAuth(test_v3.RestfulTestCase):
# now validate the v3 token with v2 API
path = '/v2.0/tokens/%s' % (token)
self.admin_request(
- path=path, token='ADMIN', method='GET', expected_status=401)
+ path=path, token=CONF.admin_token,
+ method='GET', expected_status=401)
def test_v3_v2_intermix_project_not_in_default_domaini_failed(self):
# create a trustee in default domain to delegate stuff to
@@ -3436,7 +3491,8 @@ class TestTrustAuth(test_v3.RestfulTestCase):
# now validate the v3 token with v2 API
path = '/v2.0/tokens/%s' % (token)
self.admin_request(
- path=path, token='ADMIN', method='GET', expected_status=401)
+ path=path, token=CONF.admin_token,
+ method='GET', expected_status=401)
def test_v3_v2_intermix(self):
# create a trustee in default domain to delegate stuff to
@@ -3474,7 +3530,8 @@ class TestTrustAuth(test_v3.RestfulTestCase):
# now validate the v3 token with v2 API
path = '/v2.0/tokens/%s' % (token)
self.admin_request(
- path=path, token='ADMIN', method='GET', expected_status=200)
+ path=path, token=CONF.admin_token,
+ method='GET', expected_status=200)
def test_exercise_trust_scoped_token_without_impersonation(self):
ref = self.new_trust_ref(
@@ -3494,18 +3551,18 @@ class TestTrustAuth(test_v3.RestfulTestCase):
trust_id=trust['id'])
r = self.v3_authenticate_token(auth_data)
self.assertValidProjectTrustScopedTokenResponse(r, self.trustee_user)
- self.assertEqual(r.result['token']['user']['id'],
- self.trustee_user['id'])
- self.assertEqual(r.result['token']['user']['name'],
- self.trustee_user['name'])
- self.assertEqual(r.result['token']['user']['domain']['id'],
- self.domain['id'])
- self.assertEqual(r.result['token']['user']['domain']['name'],
- self.domain['name'])
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
- self.assertEqual(r.result['token']['project']['name'],
- self.project['name'])
+ self.assertEqual(self.trustee_user['id'],
+ r.result['token']['user']['id'])
+ self.assertEqual(self.trustee_user['name'],
+ r.result['token']['user']['name'])
+ self.assertEqual(self.domain['id'],
+ r.result['token']['user']['domain']['id'])
+ self.assertEqual(self.domain['name'],
+ r.result['token']['user']['domain']['name'])
+ self.assertEqual(self.project['id'],
+ r.result['token']['project']['id'])
+ self.assertEqual(self.project['name'],
+ r.result['token']['project']['name'])
def test_exercise_trust_scoped_token_with_impersonation(self):
ref = self.new_trust_ref(
@@ -3525,16 +3582,16 @@ class TestTrustAuth(test_v3.RestfulTestCase):
trust_id=trust['id'])
r = self.v3_authenticate_token(auth_data)
self.assertValidProjectTrustScopedTokenResponse(r, self.user)
- self.assertEqual(r.result['token']['user']['id'], self.user['id'])
- self.assertEqual(r.result['token']['user']['name'], self.user['name'])
- self.assertEqual(r.result['token']['user']['domain']['id'],
- self.domain['id'])
- self.assertEqual(r.result['token']['user']['domain']['name'],
- self.domain['name'])
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
- self.assertEqual(r.result['token']['project']['name'],
- self.project['name'])
+ self.assertEqual(self.user['id'], r.result['token']['user']['id'])
+ self.assertEqual(self.user['name'], r.result['token']['user']['name'])
+ self.assertEqual(self.domain['id'],
+ r.result['token']['user']['domain']['id'])
+ self.assertEqual(self.domain['name'],
+ r.result['token']['user']['domain']['name'])
+ self.assertEqual(self.project['id'],
+ r.result['token']['project']['id'])
+ self.assertEqual(self.project['name'],
+ r.result['token']['project']['name'])
def test_impersonation_token_cannot_create_new_trust(self):
ref = self.new_trust_ref(
@@ -3950,9 +4007,9 @@ class TestAuthContext(tests.TestCase):
self.auth_context = auth.controllers.AuthContext()
def test_pick_lowest_expires_at(self):
- expires_at_1 = timeutils.isotime(timeutils.utcnow())
- expires_at_2 = timeutils.isotime(timeutils.utcnow() +
- datetime.timedelta(seconds=10))
+ expires_at_1 = utils.isotime(timeutils.utcnow())
+ expires_at_2 = utils.isotime(timeutils.utcnow() +
+ datetime.timedelta(seconds=10))
# make sure auth_context picks the lowest value
self.auth_context['expires_at'] = expires_at_1
self.auth_context['expires_at'] = expires_at_2
@@ -4113,7 +4170,7 @@ class TestFernetTokenProvider(test_v3.RestfulTestCase):
trustor_user_id=self.user_id,
trustee_user_id=trustee_user['id'],
project_id=self.project_id,
- impersonation=True,
+ impersonation=False,
role_ids=[self.role_id])
# Create a trust
@@ -4123,9 +4180,7 @@ class TestFernetTokenProvider(test_v3.RestfulTestCase):
def config_overrides(self):
super(TestFernetTokenProvider, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.fernet.Provider')
+ self.config_fixture.config(group='token', provider='fernet')
def test_validate_unscoped_token(self):
unscoped_token = self._get_unscoped_token()
@@ -4135,7 +4190,7 @@ class TestFernetTokenProvider(test_v3.RestfulTestCase):
unscoped_token = self._get_unscoped_token()
tampered_token = (unscoped_token[:50] + uuid.uuid4().hex +
unscoped_token[50 + 32:])
- self._validate_token(tampered_token, expected_status=401)
+ self._validate_token(tampered_token, expected_status=404)
def test_revoke_unscoped_token(self):
unscoped_token = self._get_unscoped_token()
@@ -4215,7 +4270,7 @@ class TestFernetTokenProvider(test_v3.RestfulTestCase):
project_scoped_token = self._get_project_scoped_token()
tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex +
project_scoped_token[50 + 32:])
- self._validate_token(tampered_token, expected_status=401)
+ self._validate_token(tampered_token, expected_status=404)
def test_revoke_project_scoped_token(self):
project_scoped_token = self._get_project_scoped_token()
@@ -4323,7 +4378,7 @@ class TestFernetTokenProvider(test_v3.RestfulTestCase):
# Get a trust scoped token
tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex +
trust_scoped_token[50 + 32:])
- self._validate_token(tampered_token, expected_status=401)
+ self._validate_token(tampered_token, expected_status=404)
def test_revoke_trust_scoped_token(self):
trustee_user, trust = self._create_trust()
@@ -4454,9 +4509,7 @@ class TestAuthFernetTokenProvider(TestAuth):
def config_overrides(self):
super(TestAuthFernetTokenProvider, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.fernet.Provider')
+ self.config_fixture.config(group='token', provider='fernet')
def test_verify_with_bound_token(self):
self.config_fixture.config(group='token', bind='kerberos')
diff --git a/keystone-moon/keystone/tests/unit/test_v3_catalog.py b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
index d231b2e1..f96b2a12 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_catalog.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
@@ -15,6 +15,8 @@
import copy
import uuid
+from testtools import matchers
+
from keystone import catalog
from keystone.tests import unit as tests
from keystone.tests.unit.ksfixtures import database
@@ -154,7 +156,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
ref2 = self.new_region_ref()
del ref1['description']
- del ref2['description']
+ ref2['description'] = None
resp1 = self.post(
'/regions',
@@ -224,6 +226,39 @@ class CatalogTestCase(test_v3.RestfulTestCase):
body={'region': region})
self.assertValidRegionResponse(r, region)
+ def test_update_region_without_description_keeps_original(self):
+ """Call ``PATCH /regions/{region_id}``."""
+ region_ref = self.new_region_ref()
+
+ resp = self.post('/regions', body={'region': region_ref},
+ expected_status=201)
+
+ region_updates = {
+ # update with something that's not the description
+ 'parent_region_id': self.region_id,
+ }
+ resp = self.patch('/regions/%s' % region_ref['id'],
+ body={'region': region_updates},
+ expected_status=200)
+
+ # NOTE(dstanek): Keystone should keep the original description.
+ self.assertEqual(region_ref['description'],
+ resp.result['region']['description'])
+
+ def test_update_region_with_null_description(self):
+ """Call ``PATCH /regions/{region_id}``."""
+ region = self.new_region_ref()
+ del region['id']
+ region['description'] = None
+ r = self.patch('/regions/%(region_id)s' % {
+ 'region_id': self.region_id},
+ body={'region': region})
+
+ # NOTE(dstanek): Keystone should turn the provided None value into
+ # an empty string before storing in the backend.
+ region['description'] = ''
+ self.assertValidRegionResponse(r, region)
+
def test_delete_region(self):
"""Call ``DELETE /regions/{region_id}``."""
@@ -379,6 +414,133 @@ class CatalogTestCase(test_v3.RestfulTestCase):
r = self.get('/endpoints')
self.assertValidEndpointListResponse(r, ref=self.endpoint)
+ def _create_random_endpoint(self, interface='public',
+ parent_region_id=None):
+ region = self._create_region_with_parent_id(
+ parent_id=parent_region_id)
+ service = self._create_random_service()
+ ref = self.new_endpoint_ref(
+ service_id=service['id'],
+ interface=interface,
+ region_id=region.result['region']['id'])
+
+ response = self.post(
+ '/endpoints',
+ body={'endpoint': ref})
+ return response.json['endpoint']
+
+ def test_list_endpoints_filtered_by_interface(self):
+ """Call ``GET /endpoints?interface={interface}``."""
+ ref = self._create_random_endpoint(interface='internal')
+
+ response = self.get('/endpoints?interface=%s' % ref['interface'])
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['interface'], endpoint['interface'])
+
+ def test_list_endpoints_filtered_by_service_id(self):
+ """Call ``GET /endpoints?service_id={service_id}``."""
+ ref = self._create_random_endpoint()
+
+ response = self.get('/endpoints?service_id=%s' % ref['service_id'])
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['service_id'], endpoint['service_id'])
+
+ def test_list_endpoints_filtered_by_region_id(self):
+ """Call ``GET /endpoints?region_id={region_id}``."""
+ ref = self._create_random_endpoint()
+
+ response = self.get('/endpoints?region_id=%s' % ref['region_id'])
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['region_id'], endpoint['region_id'])
+
+ def test_list_endpoints_filtered_by_parent_region_id(self):
+ """Call ``GET /endpoints?region_id={region_id}``.
+
+ Ensure passing the parent_region_id as filter returns an
+ empty list.
+
+ """
+ parent_region = self._create_region_with_parent_id()
+ parent_region_id = parent_region.result['region']['id']
+ self._create_random_endpoint(parent_region_id=parent_region_id)
+
+ response = self.get('/endpoints?region_id=%s' % parent_region_id)
+ self.assertEqual(0, len(response.json['endpoints']))
+
+ def test_list_endpoints_with_multiple_filters(self):
+ """Call ``GET /endpoints?interface={interface}...``.
+
+ Ensure passing different combinations of interface, region_id and
+ service_id as filters will return the correct result.
+
+ """
+ # interface and region_id specified
+ ref = self._create_random_endpoint(interface='internal')
+ response = self.get('/endpoints?interface=%s&region_id=%s' %
+ (ref['interface'], ref['region_id']))
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['interface'], endpoint['interface'])
+ self.assertEqual(ref['region_id'], endpoint['region_id'])
+
+ # interface and service_id specified
+ ref = self._create_random_endpoint(interface='internal')
+ response = self.get('/endpoints?interface=%s&service_id=%s' %
+ (ref['interface'], ref['service_id']))
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['interface'], endpoint['interface'])
+ self.assertEqual(ref['service_id'], endpoint['service_id'])
+
+ # region_id and service_id specified
+ ref = self._create_random_endpoint(interface='internal')
+ response = self.get('/endpoints?region_id=%s&service_id=%s' %
+ (ref['region_id'], ref['service_id']))
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['region_id'], endpoint['region_id'])
+ self.assertEqual(ref['service_id'], endpoint['service_id'])
+
+ # interface, region_id and service_id specified
+ ref = self._create_random_endpoint(interface='internal')
+ response = self.get(('/endpoints?interface=%s&region_id=%s'
+ '&service_id=%s') %
+ (ref['interface'], ref['region_id'],
+ ref['service_id']))
+ self.assertValidEndpointListResponse(response, ref=ref)
+
+ for endpoint in response.json['endpoints']:
+ self.assertEqual(ref['interface'], endpoint['interface'])
+ self.assertEqual(ref['region_id'], endpoint['region_id'])
+ self.assertEqual(ref['service_id'], endpoint['service_id'])
+
+ def test_list_endpoints_with_random_filter_values(self):
+ """Call ``GET /endpoints?interface={interface}...``.
+
+ Ensure passing random values for: interface, region_id and
+ service_id will return an empty list.
+
+ """
+ self._create_random_endpoint(interface='internal')
+
+ response = self.get('/endpoints?interface=%s' % uuid.uuid4().hex)
+ self.assertEqual(0, len(response.json['endpoints']))
+
+ response = self.get('/endpoints?region_id=%s' % uuid.uuid4().hex)
+ self.assertEqual(0, len(response.json['endpoints']))
+
+ response = self.get('/endpoints?service_id=%s' % uuid.uuid4().hex)
+ self.assertEqual(0, len(response.json['endpoints']))
+
def test_create_endpoint_no_enabled(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
@@ -582,6 +744,62 @@ class CatalogTestCase(test_v3.RestfulTestCase):
self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
+ def test_deleting_endpoint_with_space_in_url(self):
+ # create a v3 endpoint ref
+ ref = self.new_endpoint_ref(service_id=self.service['id'])
+
+ # add a space to all urls (intentional "i d" to test bug)
+ url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s"
+ ref['publicurl'] = url_with_space
+ ref['internalurl'] = url_with_space
+ ref['adminurl'] = url_with_space
+ ref['url'] = url_with_space
+
+ # add the endpoint to the database
+ self.catalog_api.create_endpoint(ref['id'], ref)
+
+ # delete the endpoint
+ self.delete('/endpoints/%s' % ref['id'])
+
+ # make sure it's deleted (GET should return 404)
+ self.get('/endpoints/%s' % ref['id'], expected_status=404)
+
+ def test_endpoint_create_with_valid_url(self):
+ """Create endpoint with valid url should be tested, too."""
+ # list one valid url is enough, no need to list too much
+ valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
+
+ ref = self.new_endpoint_ref(self.service_id)
+ ref['url'] = valid_url
+ self.post('/endpoints',
+ body={'endpoint': ref},
+ expected_status=201)
+
+ def test_endpoint_create_with_invalid_url(self):
+ """Test the invalid cases: substitutions are not exactly right.
+ """
+ invalid_urls = [
+ # using a substitution that is not whitelisted - KeyError
+ 'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
+
+ # invalid formatting - ValueError
+ 'http://127.0.0.1:8774/v1.1/$(tenant_id)',
+ 'http://127.0.0.1:8774/v1.1/$(tenant_id)t',
+ 'http://127.0.0.1:8774/v1.1/$(tenant_id',
+
+ # invalid type specifier - TypeError
+ # admin_url is a string not an int
+ 'http://127.0.0.1:8774/v1.1/$(admin_url)d',
+ ]
+
+ ref = self.new_endpoint_ref(self.service_id)
+
+ for invalid_url in invalid_urls:
+ ref['url'] = invalid_url
+ self.post('/endpoints',
+ body={'endpoint': ref},
+ expected_status=400)
+
class TestCatalogAPISQL(tests.TestCase):
"""Tests for the catalog Manager against the SQL backend.
@@ -602,9 +820,7 @@ class TestCatalogAPISQL(tests.TestCase):
def config_overrides(self):
super(TestCatalogAPISQL, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='keystone.catalog.backends.sql.Catalog')
+ self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
@@ -643,6 +859,20 @@ class TestCatalogAPISQL(tests.TestCase):
# all three appear in the backend
self.assertEqual(3, len(self.catalog_api.list_endpoints()))
+ # create another valid endpoint - tenant_id will be replaced
+ ref = self.new_endpoint_ref(self.service_id)
+ ref['url'] = 'http://keystone/%(tenant_id)s'
+ self.catalog_api.create_endpoint(ref['id'], ref)
+
+ # there are two valid endpoints, positive check
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
+ self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
+
+ # If the URL has no 'tenant_id' to substitute, we will skip the
+ # endpoint which contains this kind of URL, negative check.
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id=None)
+ self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
+
def test_get_catalog_always_returns_service_name(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
@@ -691,9 +921,7 @@ class TestCatalogAPISQLRegions(tests.TestCase):
def config_overrides(self):
super(TestCatalogAPISQLRegions, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='keystone.catalog.backends.sql.Catalog')
+ self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
diff --git a/keystone-moon/keystone/tests/unit/test_v3_controller.py b/keystone-moon/keystone/tests/unit/test_v3_controller.py
index 3ac4ba5a..eef64a82 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_controller.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_controller.py
@@ -15,6 +15,7 @@
import uuid
import six
+from six.moves import range
from testtools import matchers
from keystone.common import controller
diff --git a/keystone-moon/keystone/tests/unit/test_v3_credential.py b/keystone-moon/keystone/tests/unit/test_v3_credential.py
index d792b216..f8f6d35b 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_credential.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_credential.py
@@ -18,6 +18,7 @@ import uuid
from keystoneclient.contrib.ec2 import utils as ec2_utils
from oslo_config import cfg
+from testtools import matchers
from keystone import exception
from keystone.tests.unit import test_v3
@@ -375,14 +376,17 @@ class TestCredentialEc2(CredentialBaseTestCase):
self.assertIsNone(ec2_cred['trust_id'])
self._validate_signature(access=ec2_cred['access'],
secret=ec2_cred['secret'])
-
- return ec2_cred
+ uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
+ self.assertThat(ec2_cred['links']['self'],
+ matchers.EndsWith(uri))
def test_ec2_get_credential(self):
ec2_cred = self._get_ec2_cred()
uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
r = self.get(uri)
self.assertDictEqual(ec2_cred, r.result['credential'])
+ self.assertThat(ec2_cred['links']['self'],
+ matchers.EndsWith(uri))
def test_ec2_list_credentials(self):
"""Test ec2 credential listing."""
@@ -391,6 +395,8 @@ class TestCredentialEc2(CredentialBaseTestCase):
r = self.get(uri)
cred_list = r.result['credentials']
self.assertEqual(1, len(cred_list))
+ self.assertThat(r.result['links']['self'],
+ matchers.EndsWith(uri))
def test_ec2_delete_credential(self):
"""Test ec2 credential deletion."""
diff --git a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
index 437fb155..4daeff4d 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
@@ -17,13 +17,7 @@ from testtools import matchers
from keystone.tests.unit import test_v3
-class TestExtensionCase(test_v3.RestfulTestCase):
-
- EXTENSION_NAME = 'endpoint_policy'
- EXTENSION_TO_ADD = 'endpoint_policy_extension'
-
-
-class EndpointPolicyTestCase(TestExtensionCase):
+class EndpointPolicyTestCase(test_v3.RestfulTestCase):
"""Test endpoint policy CRUD.
In general, the controller layer of the endpoint policy extension is really
@@ -203,7 +197,7 @@ class EndpointPolicyTestCase(TestExtensionCase):
self.head(url, expected_status=404)
-class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
+class JsonHomeTests(test_v3.JsonHomeTestMixin):
EXTENSION_LOCATION = ('http://docs.openstack.org/api/openstack-identity/3/'
'ext/OS-ENDPOINT-POLICY/1.0/rel')
PARAM_LOCATION = 'http://docs.openstack.org/api/openstack-identity/3/param'
diff --git a/keystone-moon/keystone/tests/unit/test_v3_federation.py b/keystone-moon/keystone/tests/unit/test_v3_federation.py
index 3b6f4d8b..e646bc0a 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_federation.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_federation.py
@@ -13,26 +13,27 @@
import os
import random
import subprocess
+from testtools import matchers
import uuid
+import fixtures
from lxml import etree
import mock
from oslo_config import cfg
from oslo_log import log
-from oslo_serialization import jsonutils
+from oslo_utils import importutils
from oslotest import mockpatch
import saml2
from saml2 import saml
from saml2 import sigver
-from six.moves import urllib
-import xmldsig
+from six.moves import range, urllib, zip
+xmldsig = importutils.try_import("saml2.xmldsig")
+if not xmldsig:
+ xmldsig = importutils.try_import("xmldsig")
from keystone.auth import controllers as auth_controllers
-from keystone.auth.plugins import mapped
-from keystone.contrib import federation
from keystone.contrib.federation import controllers as federation_controllers
from keystone.contrib.federation import idp as keystone_idp
-from keystone.contrib.federation import utils as mapping_utils
from keystone import exception
from keystone import notifications
from keystone.tests.unit import core
@@ -68,7 +69,7 @@ class FederatedSetupMixin(object):
USER = 'user@ORGANIZATION'
ASSERTION_PREFIX = 'PREFIX_'
IDP_WITH_REMOTE = 'ORG_IDP_REMOTE'
- REMOTE_ID = 'entityID_IDP'
+ REMOTE_IDS = ['entityID_IDP1', 'entityID_IDP2']
REMOTE_ID_ATTR = uuid.uuid4().hex
UNSCOPED_V3_SAML2_REQ = {
@@ -108,14 +109,14 @@ class FederatedSetupMixin(object):
self.assertEqual(token_projects, projects_ref)
def _check_scoped_token_attributes(self, token):
- def xor_project_domain(iterable):
- return sum(('project' in iterable, 'domain' in iterable)) % 2
+ def xor_project_domain(token_keys):
+ return sum(('project' in token_keys, 'domain' in token_keys)) % 2
for obj in ('user', 'catalog', 'expires_at', 'issued_at',
'methods', 'roles'):
self.assertIn(obj, token)
# Check for either project or domain
- if not xor_project_domain(token.keys()):
+ if not xor_project_domain(list(token.keys())):
raise AssertionError("You must specify either"
"project or domain.")
@@ -123,6 +124,10 @@ class FederatedSetupMixin(object):
os_federation = token['user']['OS-FEDERATION']
self.assertEqual(self.IDP, os_federation['identity_provider']['id'])
self.assertEqual(self.PROTOCOL, os_federation['protocol']['id'])
+ self.assertListEqual(sorted(['groups',
+ 'identity_provider',
+ 'protocol']),
+ sorted(os_federation.keys()))
def _issue_unscoped_token(self,
idp=None,
@@ -327,7 +332,8 @@ class FederatedSetupMixin(object):
},
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
}
],
@@ -336,6 +342,9 @@ class FederatedSetupMixin(object):
'type': 'UserName'
},
{
+ 'type': 'Email',
+ },
+ {
'type': 'orgPersonType',
'any_one_of': [
'Employee'
@@ -352,7 +361,8 @@ class FederatedSetupMixin(object):
},
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
}
],
@@ -361,6 +371,9 @@ class FederatedSetupMixin(object):
'type': self.ASSERTION_PREFIX + 'UserName'
},
{
+ 'type': self.ASSERTION_PREFIX + 'Email',
+ },
+ {
'type': self.ASSERTION_PREFIX + 'orgPersonType',
'any_one_of': [
'SuperEmployee'
@@ -377,7 +390,8 @@ class FederatedSetupMixin(object):
},
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
}
],
@@ -386,6 +400,9 @@ class FederatedSetupMixin(object):
'type': 'UserName'
},
{
+ 'type': 'Email'
+ },
+ {
'type': 'orgPersonType',
'any_one_of': [
'Customer'
@@ -413,7 +430,8 @@ class FederatedSetupMixin(object):
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
}
],
@@ -422,6 +440,9 @@ class FederatedSetupMixin(object):
'type': 'UserName'
},
{
+ 'type': 'Email'
+ },
+ {
'type': 'orgPersonType',
'any_one_of': [
'Admin',
@@ -444,7 +465,8 @@ class FederatedSetupMixin(object):
},
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
}
],
@@ -453,6 +475,9 @@ class FederatedSetupMixin(object):
'type': 'UserName',
},
{
+ 'type': 'Email',
+ },
+ {
'type': 'FirstName',
'any_one_of': [
'Jill'
@@ -475,7 +500,8 @@ class FederatedSetupMixin(object):
},
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
}
],
@@ -485,6 +511,9 @@ class FederatedSetupMixin(object):
},
{
'type': 'Email',
+ },
+ {
+ 'type': 'Email',
'any_one_of': [
'testacct@example.com'
]
@@ -502,7 +531,8 @@ class FederatedSetupMixin(object):
"local": [
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
},
{
@@ -519,6 +549,9 @@ class FederatedSetupMixin(object):
'type': 'UserName',
},
{
+ 'type': 'Email',
+ },
+ {
"type": "orgPersonType",
"any_one_of": [
"CEO",
@@ -531,7 +564,8 @@ class FederatedSetupMixin(object):
"local": [
{
'user': {
- 'name': '{0}'
+ 'name': '{0}',
+ 'id': '{1}'
}
},
{
@@ -548,6 +582,9 @@ class FederatedSetupMixin(object):
"type": "UserName",
},
{
+ "type": "Email",
+ },
+ {
"type": "orgPersonType",
"any_one_of": [
"Managers"
@@ -559,7 +596,8 @@ class FederatedSetupMixin(object):
"local": [
{
"user": {
- "name": "{0}"
+ "name": "{0}",
+ "id": "{1}"
}
},
{
@@ -576,6 +614,9 @@ class FederatedSetupMixin(object):
"type": "UserName",
},
{
+ "type": "Email",
+ },
+ {
"type": "UserName",
"any_one_of": [
"IamTester"
@@ -639,7 +680,7 @@ class FederatedSetupMixin(object):
self.idp)
# Add IDP with remote
self.idp_with_remote = self.idp_ref(id=self.IDP_WITH_REMOTE)
- self.idp_with_remote['remote_id'] = self.REMOTE_ID
+ self.idp_with_remote['remote_ids'] = self.REMOTE_IDS
self.federation_api.create_idp(self.idp_with_remote['id'],
self.idp_with_remote)
# Add a mapping
@@ -793,28 +834,137 @@ class FederatedIdentityProviderTests(FederationTests):
return r
def test_create_idp(self):
- """Creates the IdentityProvider entity."""
+ """Creates the IdentityProvider entity associated to remote_ids."""
- keys_to_check = self.idp_keys
- body = self._http_idp_input()
+ keys_to_check = list(self.idp_keys)
+ body = self.default_body.copy()
+ body['description'] = uuid.uuid4().hex
resp = self._create_default_idp(body=body)
self.assertValidResponse(resp, 'identity_provider', dummy_validator,
keys_to_check=keys_to_check,
ref=body)
def test_create_idp_remote(self):
- """Creates the IdentityProvider entity associated to a remote_id."""
+ """Creates the IdentityProvider entity associated to remote_ids."""
keys_to_check = list(self.idp_keys)
- keys_to_check.append('remote_id')
+ keys_to_check.append('remote_ids')
body = self.default_body.copy()
body['description'] = uuid.uuid4().hex
- body['remote_id'] = uuid.uuid4().hex
+ body['remote_ids'] = [uuid.uuid4().hex,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex]
resp = self._create_default_idp(body=body)
self.assertValidResponse(resp, 'identity_provider', dummy_validator,
keys_to_check=keys_to_check,
ref=body)
+ def test_create_idp_remote_repeated(self):
+ """Creates two IdentityProvider entities with overlapping remote_ids.
+
+ One remote_id is shared by both IdPs, so creation of the second IdP
+ fails because remote_ids must be unique across identity providers.
+
+ Expect HTTP 409 code for the latter call.
+
+ """
+
+ body = self.default_body.copy()
+ repeated_remote_id = uuid.uuid4().hex
+ body['remote_ids'] = [uuid.uuid4().hex,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex,
+ repeated_remote_id]
+ self._create_default_idp(body=body)
+
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ body['remote_ids'] = [uuid.uuid4().hex,
+ repeated_remote_id]
+ self.put(url, body={'identity_provider': body},
+ expected_status=409)
+
+ def test_create_idp_remote_empty(self):
+ """Creates an IdP with empty remote_ids."""
+
+ keys_to_check = list(self.idp_keys)
+ keys_to_check.append('remote_ids')
+ body = self.default_body.copy()
+ body['description'] = uuid.uuid4().hex
+ body['remote_ids'] = []
+ resp = self._create_default_idp(body=body)
+ self.assertValidResponse(resp, 'identity_provider', dummy_validator,
+ keys_to_check=keys_to_check,
+ ref=body)
+
+ def test_create_idp_remote_none(self):
+ """Creates an IdP with remote_ids set to None."""
+
+ keys_to_check = list(self.idp_keys)
+ keys_to_check.append('remote_ids')
+ body = self.default_body.copy()
+ body['description'] = uuid.uuid4().hex
+ body['remote_ids'] = None
+ resp = self._create_default_idp(body=body)
+ expected = body.copy()
+ expected['remote_ids'] = []
+ self.assertValidResponse(resp, 'identity_provider', dummy_validator,
+ keys_to_check=keys_to_check,
+ ref=expected)
+
+ def test_update_idp_remote_ids(self):
+ """Update IdP's remote_ids parameter."""
+ body = self.default_body.copy()
+ body['remote_ids'] = [uuid.uuid4().hex]
+ default_resp = self._create_default_idp(body=body)
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ url = self.base_url(suffix=idp_id)
+ self.assertIsNotNone(idp_id)
+
+ body['remote_ids'] = [uuid.uuid4().hex, uuid.uuid4().hex]
+
+ body = {'identity_provider': body}
+ resp = self.patch(url, body=body)
+ updated_idp = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ body = body['identity_provider']
+ self.assertEqual(sorted(body['remote_ids']),
+ sorted(updated_idp.get('remote_ids')))
+
+ resp = self.get(url)
+ returned_idp = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ self.assertEqual(sorted(body['remote_ids']),
+ sorted(returned_idp.get('remote_ids')))
+
+ def test_update_idp_clean_remote_ids(self):
+ """Update IdP's remote_ids parameter with an empty list."""
+ body = self.default_body.copy()
+ body['remote_ids'] = [uuid.uuid4().hex]
+ default_resp = self._create_default_idp(body=body)
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ url = self.base_url(suffix=idp_id)
+ self.assertIsNotNone(idp_id)
+
+ body['remote_ids'] = []
+
+ body = {'identity_provider': body}
+ resp = self.patch(url, body=body)
+ updated_idp = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ body = body['identity_provider']
+ self.assertEqual(sorted(body['remote_ids']),
+ sorted(updated_idp.get('remote_ids')))
+
+ resp = self.get(url)
+ returned_idp = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ self.assertEqual(sorted(body['remote_ids']),
+ sorted(returned_idp.get('remote_ids')))
+
def test_list_idps(self, iterations=5):
"""Lists all available IdentityProviders.
@@ -899,6 +1049,33 @@ class FederatedIdentityProviderTests(FederationTests):
self.delete(url)
self.get(url, expected_status=404)
+ def test_delete_idp_also_deletes_assigned_protocols(self):
+ """Deleting an IdP will delete its assigned protocol."""
+
+ # create default IdP
+ default_resp = self._create_default_idp()
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp['id']
+ protocol_id = uuid.uuid4().hex
+
+ url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
+ idp_url = self.base_url(suffix=idp_id)
+
+ # assign protocol to IdP
+ kwargs = {'expected_status': 201}
+ resp, idp_id, proto = self._assign_protocol_to_idp(
+ url=url,
+ idp_id=idp_id,
+ proto=protocol_id,
+ **kwargs)
+
+ # removing IdP will remove the assigned protocol as well
+ self.assertEqual(1, len(self.federation_api.list_protocols(idp_id)))
+ self.delete(idp_url)
+ self.get(idp_url, expected_status=404)
+ self.assertEqual(0, len(self.federation_api.list_protocols(idp_id)))
+
def test_delete_nonexisting_idp(self):
"""Delete nonexisting IdP.
@@ -918,7 +1095,7 @@ class FederatedIdentityProviderTests(FederationTests):
self.assertIsNotNone(idp_id)
_enabled = not default_idp.get('enabled')
- body = {'remote_id': uuid.uuid4().hex,
+ body = {'remote_ids': [uuid.uuid4().hex, uuid.uuid4().hex],
'description': uuid.uuid4().hex,
'enabled': _enabled}
@@ -928,13 +1105,21 @@ class FederatedIdentityProviderTests(FederationTests):
'identity_provider')
body = body['identity_provider']
for key in body.keys():
- self.assertEqual(body[key], updated_idp.get(key))
+ if isinstance(body[key], list):
+ self.assertEqual(sorted(body[key]),
+ sorted(updated_idp.get(key)))
+ else:
+ self.assertEqual(body[key], updated_idp.get(key))
resp = self.get(url)
updated_idp = self._fetch_attribute_from_response(resp,
'identity_provider')
for key in body.keys():
- self.assertEqual(body[key], updated_idp.get(key))
+ if isinstance(body[key], list):
+ self.assertEqual(sorted(body[key]),
+ sorted(updated_idp.get(key)))
+ else:
+ self.assertEqual(body[key], updated_idp.get(key))
def test_update_idp_immutable_attributes(self):
"""Update IdP's immutable parameters.
@@ -1126,7 +1311,7 @@ class MappingCRUDTests(FederationTests):
self.assertIsNotNone(entity.get('id'))
self.assertIsNotNone(entity.get('rules'))
if ref:
- self.assertEqual(jsonutils.loads(entity['rules']), ref['rules'])
+ self.assertEqual(entity['rules'], ref['rules'])
return entity
def _create_default_mapping_entry(self):
@@ -1262,594 +1447,11 @@ class MappingCRUDTests(FederationTests):
self.put(url, expected_status=400, body={'mapping': mapping})
-class MappingRuleEngineTests(FederationTests):
- """A class for testing the mapping rule engine."""
-
- def assertValidMappedUserObject(self, mapped_properties,
- user_type='ephemeral',
- domain_id=None):
- """Check whether mapped properties object has 'user' within.
-
- According to today's rules, RuleProcessor does not have to issue user's
- id or name. What's actually required is user's type and for ephemeral
- users that would be service domain named 'Federated'.
- """
- self.assertIn('user', mapped_properties,
- message='Missing user object in mapped properties')
- user = mapped_properties['user']
- self.assertIn('type', user)
- self.assertEqual(user_type, user['type'])
- self.assertIn('domain', user)
- domain = user['domain']
- domain_name_or_id = domain.get('id') or domain.get('name')
- domain_ref = domain_id or federation.FEDERATED_DOMAIN_KEYWORD
- self.assertEqual(domain_ref, domain_name_or_id)
-
- def test_rule_engine_any_one_of_and_direct_mapping(self):
- """Should return user's name and group id EMPLOYEE_GROUP_ID.
-
- The ADMIN_ASSERTION should successfully have a match in MAPPING_LARGE.
- They will test the case where `any_one_of` is valid, and there is
- a direct mapping for the users name.
-
- """
-
- mapping = mapping_fixtures.MAPPING_LARGE
- assertion = mapping_fixtures.ADMIN_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- fn = assertion.get('FirstName')
- ln = assertion.get('LastName')
- full_name = '%s %s' % (fn, ln)
- group_ids = values.get('group_ids')
- user_name = values.get('user', {}).get('name')
-
- self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
- self.assertEqual(full_name, user_name)
-
- def test_rule_engine_no_regex_match(self):
- """Should deny authorization, the email of the tester won't match.
-
- This will not match since the email in the assertion will fail
- the regex test. It is set to match any @example.com address.
- But the incoming value is set to eviltester@example.org.
- RuleProcessor should return list of empty group_ids.
-
- """
-
- mapping = mapping_fixtures.MAPPING_LARGE
- assertion = mapping_fixtures.BAD_TESTER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
-
- self.assertValidMappedUserObject(mapped_properties)
- self.assertIsNone(mapped_properties['user'].get('name'))
- self.assertListEqual(list(), mapped_properties['group_ids'])
-
- def test_rule_engine_regex_many_groups(self):
- """Should return group CONTRACTOR_GROUP_ID.
-
- The TESTER_ASSERTION should successfully have a match in
- MAPPING_TESTER_REGEX. This will test the case where many groups
- are in the assertion, and a regex value is used to try and find
- a match.
-
- """
-
- mapping = mapping_fixtures.MAPPING_TESTER_REGEX
- assertion = mapping_fixtures.TESTER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
-
- def test_rule_engine_any_one_of_many_rules(self):
- """Should return group CONTRACTOR_GROUP_ID.
-
- The CONTRACTOR_ASSERTION should successfully have a match in
- MAPPING_SMALL. This will test the case where many rules
- must be matched, including an `any_one_of`, and a direct
- mapping.
-
- """
-
- mapping = mapping_fixtures.MAPPING_SMALL
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.CONTRACTOR_GROUP_ID, group_ids)
-
- def test_rule_engine_not_any_of_and_direct_mapping(self):
- """Should return user's name and email.
-
- The CUSTOMER_ASSERTION should successfully have a match in
- MAPPING_LARGE. This will test the case where a requirement
- has `not_any_of`, and direct mapping to a username, no group.
-
- """
-
- mapping = mapping_fixtures.MAPPING_LARGE
- assertion = mapping_fixtures.CUSTOMER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertEqual([], group_ids,)
-
- def test_rule_engine_not_any_of_many_rules(self):
- """Should return group EMPLOYEE_GROUP_ID.
-
- The EMPLOYEE_ASSERTION should successfully have a match in
- MAPPING_SMALL. This will test the case where many remote
- rules must be matched, including a `not_any_of`.
-
- """
-
- mapping = mapping_fixtures.MAPPING_SMALL
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
-
- def test_rule_engine_not_any_of_regex_verify_pass(self):
- """Should return group DEVELOPER_GROUP_ID.
-
- The DEVELOPER_ASSERTION should successfully have a match in
- MAPPING_DEVELOPER_REGEX. This will test the case where many
- remote rules must be matched, including a `not_any_of`, with
- regex set to True.
-
- """
-
- mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
- assertion = mapping_fixtures.DEVELOPER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
-
- def test_rule_engine_not_any_of_regex_verify_fail(self):
- """Should deny authorization.
-
- The email in the assertion will fail the regex test.
- It is set to reject any @example.org address, but the
- incoming value is set to evildeveloper@example.org.
- RuleProcessor should return list of empty group_ids.
-
- """
-
- mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
- assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
-
- self.assertValidMappedUserObject(mapped_properties)
- self.assertIsNone(mapped_properties['user'].get('name'))
- self.assertListEqual(list(), mapped_properties['group_ids'])
-
- def _rule_engine_regex_match_and_many_groups(self, assertion):
- """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
-
- A helper function injecting assertion passed as an argument.
- Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results.
-
- """
-
- mapping = mapping_fixtures.MAPPING_LARGE
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- values = rp.process(assertion)
-
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertValidMappedUserObject(values)
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
- self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
-
- def test_rule_engine_regex_match_and_many_groups(self):
- """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
-
- The TESTER_ASSERTION should successfully have a match in
- MAPPING_LARGE. This will test a successful regex match
- for an `any_one_of` evaluation type, and will have many
- groups returned.
-
- """
- self._rule_engine_regex_match_and_many_groups(
- mapping_fixtures.TESTER_ASSERTION)
-
- def test_rule_engine_discards_nonstring_objects(self):
- """Check whether RuleProcessor discards non string objects.
-
- Despite the fact that assertion is malformed and contains
- non string objects, RuleProcessor should correctly discard them and
- successfully have a match in MAPPING_LARGE.
-
- """
- self._rule_engine_regex_match_and_many_groups(
- mapping_fixtures.MALFORMED_TESTER_ASSERTION)
-
- def test_rule_engine_fails_after_discarding_nonstring(self):
- """Check whether RuleProcessor discards non string objects.
-
- Expect RuleProcessor to discard non string object, which
- is required for a correct rule match. RuleProcessor will result with
- empty list of groups.
-
- """
- mapping = mapping_fixtures.MAPPING_SMALL
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertValidMappedUserObject(mapped_properties)
- self.assertIsNone(mapped_properties['user'].get('name'))
- self.assertListEqual(list(), mapped_properties['group_ids'])
-
- def test_rule_engine_returns_group_names(self):
- """Check whether RuleProcessor returns group names with their domains.
-
- RuleProcessor should return 'group_names' entry with a list of
- dictionaries with two entries 'name' and 'domain' identifying group by
- its name and domain.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUP_NAMES
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- reference = {
- mapping_fixtures.DEVELOPER_GROUP_NAME:
- {
- "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
- "domain": {
- "name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME
- }
- },
- mapping_fixtures.TESTER_GROUP_NAME:
- {
- "name": mapping_fixtures.TESTER_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
-
- def test_rule_engine_whitelist_and_direct_groups_mapping(self):
- """Should return user's groups Developer and Contractor.
-
- The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
- in MAPPING_GROUPS_WHITELIST. It will test the case where 'whitelist'
- correctly filters out Manager and only allows Developer and Contractor.
-
- """
-
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
-
- reference = {
- mapping_fixtures.DEVELOPER_GROUP_NAME:
- {
- "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- },
- mapping_fixtures.CONTRACTOR_GROUP_NAME:
- {
- "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
-
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual([], mapped_properties['group_ids'])
-
- def test_rule_engine_blacklist_and_direct_groups_mapping(self):
- """Should return user's group Developer.
-
- The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
- in MAPPING_GROUPS_BLACKLIST. It will test the case where 'blacklist'
- correctly filters out Manager and Developer and only allows Contractor.
-
- """
-
- mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
-
- reference = {
- mapping_fixtures.CONTRACTOR_GROUP_NAME:
- {
- "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual([], mapped_properties['group_ids'])
-
- def test_rule_engine_blacklist_and_direct_groups_mapping_multiples(self):
- """Tests matching multiple values before the blacklist.
-
- Verifies that the local indexes are correct when matching multiple
- remote values for a field when the field occurs before the blacklist
- entry in the remote rules.
-
- """
-
- mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
-
- reference = {
- mapping_fixtures.CONTRACTOR_GROUP_NAME:
- {
- "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual([], mapped_properties['group_ids'])
-
- def test_rule_engine_whitelist_direct_group_mapping_missing_domain(self):
- """Test if the local rule is rejected upon missing domain value
-
- This is a variation with a ``whitelist`` filter.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- self.assertRaises(exception.ValidationError, rp.process, assertion)
-
- def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self):
- """Test if the local rule is rejected upon missing domain value
-
- This is a variation with a ``blacklist`` filter.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- self.assertRaises(exception.ValidationError, rp.process, assertion)
-
- def test_rule_engine_no_groups_allowed(self):
- """Should return user mapped to no groups.
-
- The EMPLOYEE_ASSERTION should successfully have a match
- in MAPPING_GROUPS_WHITELIST, but 'whitelist' should filter out
- the group values from the assertion and thus map to no groups.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertListEqual(mapped_properties['group_names'], [])
- self.assertListEqual(mapped_properties['group_ids'], [])
- self.assertEqual('tbo', mapped_properties['user']['name'])
-
- def test_mapping_federated_domain_specified(self):
- """Test mapping engine when domain 'ephemeral' is explicitely set.
-
- For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion
- EMPLOYEE_ASSERTION
-
- """
- mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
-
- def test_create_user_object_with_bad_mapping(self):
- """Test if user object is created even with bad mapping.
-
- User objects will be created by mapping engine always as long as there
- is corresponding local rule. This test shows, that even with assertion
- where no group names nor ids are matched, but there is 'blind' rule for
- mapping user, such object will be created.
-
- In this test MAPPING_EHPEMERAL_USER expects UserName set to jsmith
- whereas value from assertion is 'tbo'.
-
- """
- mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
-
- self.assertNotIn('id', mapped_properties['user'])
- self.assertNotIn('name', mapped_properties['user'])
-
- def test_set_ephemeral_domain_to_ephemeral_users(self):
- """Test auto assigning service domain to ephemeral users.
-
- Test that ephemeral users will always become members of federated
- service domain. The check depends on ``type`` value which must be set
- to ``ephemeral`` in case of ephemeral user.
-
- """
- mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
-
- def test_local_user_local_domain(self):
- """Test that local users can have non-service domains assigned."""
- mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(
- mapped_properties, user_type='local',
- domain_id=mapping_fixtures.LOCAL_DOMAIN)
-
- def test_user_identifications_name(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has property type set ('ephemeral')
- - Check if user's name is properly mapped from the assertion
- - Check if user's id is properly set and equal to name, as it was not
- explicitely specified in the mapping.
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username({}, mapped_properties)
- self.assertEqual('jsmith', mapped_properties['user']['id'])
- self.assertEqual('jsmith', mapped_properties['user']['name'])
-
- def test_user_identifications_name_and_federated_domain(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has propert type set ('ephemeral')
- - Check if user's name is properly mapped from the assertion
- - Check if user's id is properly set and equal to name, as it was not
- explicitely specified in the mapping.
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username({}, mapped_properties)
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual('tbo', mapped_properties['user']['id'])
-
- def test_user_identification_id(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has propert type set ('ephemeral')
- - Check if user's id is properly mapped from the assertion
- - Check if user's name is properly set and equal to id, as it was not
- explicitely specified in the mapping.
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.ADMIN_ASSERTION
- mapped_properties = rp.process(assertion)
- context = {'environment': {}}
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username(context, mapped_properties)
- self.assertEqual('bob', mapped_properties['user']['name'])
- self.assertEqual('bob', mapped_properties['user']['id'])
-
- def test_user_identification_id_and_name(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has proper type set ('ephemeral')
- - Check if user's name is properly mapped from the assertion
- - Check if user's id is properly set and and equal to value hardcoded
- in the mapping
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CUSTOMER_ASSERTION
- mapped_properties = rp.process(assertion)
- context = {'environment': {}}
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username(context, mapped_properties)
- self.assertEqual('bwilliams', mapped_properties['user']['name'])
- self.assertEqual('abc123', mapped_properties['user']['id'])
-
-
class FederatedTokenTests(FederationTests, FederatedSetupMixin):
def auth_plugin_config_override(self):
methods = ['saml2']
- method_classes = {'saml2': 'keystone.auth.plugins.saml2.Saml2'}
- super(FederatedTokenTests, self).auth_plugin_config_override(
- methods, **method_classes)
+ super(FederatedTokenTests, self).auth_plugin_config_override(methods)
def setUp(self):
super(FederatedTokenTests, self).setUp()
@@ -1923,7 +1525,8 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
def test_issue_unscoped_token_with_remote_no_attribute(self):
r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
environment={
- self.REMOTE_ID_ATTR: self.REMOTE_ID
+ self.REMOTE_ID_ATTR:
+ self.REMOTE_IDS[0]
})
self.assertIsNotNone(r.headers.get('X-Subject-Token'))
@@ -1932,7 +1535,18 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
remote_id_attribute=self.REMOTE_ID_ATTR)
r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
environment={
- self.REMOTE_ID_ATTR: self.REMOTE_ID
+ self.REMOTE_ID_ATTR:
+ self.REMOTE_IDS[0]
+ })
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
+ def test_issue_unscoped_token_with_saml2_remote(self):
+ self.config_fixture.config(group='saml2',
+ remote_id_attribute=self.REMOTE_ID_ATTR)
+ r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
+ environment={
+ self.REMOTE_ID_ATTR:
+ self.REMOTE_IDS[0]
})
self.assertIsNotNone(r.headers.get('X-Subject-Token'))
@@ -1946,6 +1560,25 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self.REMOTE_ID_ATTR: uuid.uuid4().hex
})
+ def test_issue_unscoped_token_with_remote_default_overwritten(self):
+ """Test that protocol remote_id_attribute has higher priority.
+
+ Make sure the parameter stored under ``protocol`` section has higher
+ priority over parameter from default ``federation`` configuration
+ section.
+
+ """
+ self.config_fixture.config(group='saml2',
+ remote_id_attribute=self.REMOTE_ID_ATTR)
+ self.config_fixture.config(group='federation',
+ remote_id_attribute=uuid.uuid4().hex)
+ r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
+ environment={
+ self.REMOTE_ID_ATTR:
+ self.REMOTE_IDS[0]
+ })
+ self.assertIsNotNone(r.headers.get('X-Subject-Token'))
+
def test_issue_unscoped_token_with_remote_unavailable(self):
self.config_fixture.config(group='federation',
remote_id_attribute=self.REMOTE_ID_ATTR)
@@ -1979,7 +1612,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
context = {
'environment': {
'malformed_object': object(),
- 'another_bad_idea': tuple(xrange(10)),
+ 'another_bad_idea': tuple(range(10)),
'yet_another_bad_param': dict(zip(uuid.uuid4().hex,
range(32)))
}
@@ -2156,6 +1789,44 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self.assertEqual(projects_ref, projects,
'match failed for url %s' % url)
+ # TODO(samueldmq): Create another test class for role inheritance tests.
+ # The advantage would be to reduce the complexity of this test class and
+ # have tests specific to this functionality grouped, easing readability
+ # and maintainability.
+ def test_list_projects_for_inherited_project_assignment(self):
+ # Enable os_inherit extension
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ # Create a subproject
+ subproject_inherited = self.new_project_ref(
+ domain_id=self.domainD['id'],
+ parent_id=self.project_inherited['id'])
+ self.resource_api.create_project(subproject_inherited['id'],
+ subproject_inherited)
+
+ # Create an inherited role assignment
+ self.assignment_api.create_grant(
+ role_id=self.role_employee['id'],
+ group_id=self.group_employees['id'],
+ project_id=self.project_inherited['id'],
+ inherited_to_projects=True)
+
+ # Define expected projects from employee assertion, which contain
+ # the created subproject
+ expected_project_ids = [self.project_all['id'],
+ self.proj_employees['id'],
+ subproject_inherited['id']]
+
+ # Assert expected projects for both available URLs
+ for url in ('/OS-FEDERATION/projects', '/auth/projects'):
+ r = self.get(url, token=self.tokens['EMPLOYEE_ASSERTION'])
+ project_ids = [project['id'] for project in r.result['projects']]
+
+ self.assertEqual(len(expected_project_ids), len(project_ids))
+ for expected_project_id in expected_project_ids:
+ self.assertIn(expected_project_id, project_ids,
+ 'Projects match failed for url %s' % url)
+
def test_list_domains(self):
urls = ('/OS-FEDERATION/domains', '/auth/domains')
@@ -2325,7 +1996,6 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
"remote": [
{
"type": "REMOTE_USER_GROUPS",
- "blacklist": ["noblacklist"]
}
]
}
@@ -2333,10 +2003,290 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
}
self.federation_api.update_mapping(self.mapping['id'], rules)
+ def test_empty_blacklist_passess_all_values(self):
+ """Test a mapping with empty blacklist specified
+
+ Not adding a ``blacklist`` keyword to the mapping rules has the same
+ effect as adding an empty ``blacklist``.
+ In both cases, the mapping engine will not discard any groups that are
+ associated with apache environment variables.
+
+ This test checks scenario where an empty blacklist was specified.
+ Expected result is to allow any value.
+
+ The test scenario is as follows:
+ - Create group ``EXISTS``
+ - Create group ``NO_EXISTS``
+ - Set mapping rules for existing IdP with a blacklist
+ that passes through as REMOTE_USER_GROUPS
+ - Issue unscoped token with groups ``EXISTS`` and ``NO_EXISTS``
+ assigned
+
+ """
+
+ domain_id = self.domainA['id']
+ domain_name = self.domainA['name']
+
+ # Add a group "EXISTS"
+ group_exists = self.new_group_ref(domain_id=domain_id)
+ group_exists['name'] = 'EXISTS'
+ group_exists = self.identity_api.create_group(group_exists)
+
+ # Add a group "NO_EXISTS"
+ group_no_exists = self.new_group_ref(domain_id=domain_id)
+ group_no_exists['name'] = 'NO_EXISTS'
+ group_no_exists = self.identity_api.create_group(group_no_exists)
+
+ group_ids = set([group_exists['id'], group_no_exists['id']])
+
+ rules = {
+ 'rules': [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "id": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {"name": domain_name}
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER_GROUPS",
+ "blacklist": []
+ }
+ ]
+ }
+ ]
+ }
+ self.federation_api.update_mapping(self.mapping['id'], rules)
+ r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
+ assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
+ self.assertEqual(len(group_ids), len(assigned_group_ids))
+ for group in assigned_group_ids:
+ self.assertIn(group['id'], group_ids)
+
+ def test_not_adding_blacklist_passess_all_values(self):
+ """Test a mapping without blacklist specified.
+
+ Not adding a ``blacklist`` keyword to the mapping rules has the same
+ effect as adding an empty ``blacklist``. In both cases all values will
+ be accepted and passed.
+
+ This test checks scenario where an blacklist was not specified.
+ Expected result is to allow any value.
+
+ The test scenario is as follows:
+ - Create group ``EXISTS``
+ - Create group ``NO_EXISTS``
+ - Set mapping rules for existing IdP with a blacklist
+ that passes through as REMOTE_USER_GROUPS
+ - Issue unscoped token with on groups ``EXISTS`` and ``NO_EXISTS``
+ assigned
+
+ """
+
+ domain_id = self.domainA['id']
+ domain_name = self.domainA['name']
+
+ # Add a group "EXISTS"
+ group_exists = self.new_group_ref(domain_id=domain_id)
+ group_exists['name'] = 'EXISTS'
+ group_exists = self.identity_api.create_group(group_exists)
+
+ # Add a group "NO_EXISTS"
+ group_no_exists = self.new_group_ref(domain_id=domain_id)
+ group_no_exists['name'] = 'NO_EXISTS'
+ group_no_exists = self.identity_api.create_group(group_no_exists)
+
+ group_ids = set([group_exists['id'], group_no_exists['id']])
+
+ rules = {
+ 'rules': [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "id": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {"name": domain_name}
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER_GROUPS",
+ }
+ ]
+ }
+ ]
+ }
+ self.federation_api.update_mapping(self.mapping['id'], rules)
+ r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
+ assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
+ self.assertEqual(len(group_ids), len(assigned_group_ids))
+ for group in assigned_group_ids:
+ self.assertIn(group['id'], group_ids)
+
+ def test_empty_whitelist_discards_all_values(self):
+ """Test that empty whitelist blocks all the values
+
+ Not adding a ``whitelist`` keyword to the mapping value is different
+ than adding empty whitelist. The former case will simply pass all the
+ values, whereas the latter would discard all the values.
+
+ This test checks scenario where an empty whitelist was specified.
+ The expected result is that no groups are matched.
+
+ The test scenario is as follows:
+ - Create group ``EXISTS``
+ - Set mapping rules for existing IdP with an empty whitelist
+ that whould discard any values from the assertion
+ - Try issuing unscoped token, expect server to raise
+ ``exception.MissingGroups`` as no groups were matched and ephemeral
+ user does not have any group assigned.
+
+ """
+ domain_id = self.domainA['id']
+ domain_name = self.domainA['name']
+ group = self.new_group_ref(domain_id=domain_id)
+ group['name'] = 'EXISTS'
+ group = self.identity_api.create_group(group)
+ rules = {
+ 'rules': [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "id": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {"name": domain_name}
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER_GROUPS",
+ "whitelist": []
+ }
+ ]
+ }
+ ]
+ }
+ self.federation_api.update_mapping(self.mapping['id'], rules)
+
+ self.assertRaises(exception.MissingGroups,
+ self._issue_unscoped_token,
+ assertion='UNMATCHED_GROUP_ASSERTION')
+
+ def test_not_setting_whitelist_accepts_all_values(self):
+ """Test that not setting whitelist passes
+
+ Not adding a ``whitelist`` keyword to the mapping value is different
+ than adding empty whitelist. The former case will simply pass all the
+ values, whereas the latter would discard all the values.
+
+ This test checks a scenario where a ``whitelist`` was not specified.
+ Expected result is that no groups are ignored.
+
+ The test scenario is as follows:
+ - Create group ``EXISTS``
+ - Set mapping rules for existing IdP with an empty whitelist
+ that whould discard any values from the assertion
+ - Issue an unscoped token and make sure ephemeral user is a member of
+ two groups.
+
+ """
+ domain_id = self.domainA['id']
+ domain_name = self.domainA['name']
+
+ # Add a group "EXISTS"
+ group_exists = self.new_group_ref(domain_id=domain_id)
+ group_exists['name'] = 'EXISTS'
+ group_exists = self.identity_api.create_group(group_exists)
+
+ # Add a group "NO_EXISTS"
+ group_no_exists = self.new_group_ref(domain_id=domain_id)
+ group_no_exists['name'] = 'NO_EXISTS'
+ group_no_exists = self.identity_api.create_group(group_no_exists)
+
+ group_ids = set([group_exists['id'], group_no_exists['id']])
+
+ rules = {
+ 'rules': [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "id": "{0}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER"
+ }
+ ]
+ },
+ {
+ "local": [
+ {
+ "groups": "{0}",
+ "domain": {"name": domain_name}
+ }
+ ],
+ "remote": [
+ {
+ "type": "REMOTE_USER_GROUPS",
+ }
+ ]
+ }
+ ]
+ }
+ self.federation_api.update_mapping(self.mapping['id'], rules)
r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
- self.assertEqual(1, len(assigned_group_ids))
- self.assertEqual(group['id'], assigned_group_ids[0]['id'])
+ self.assertEqual(len(group_ids), len(assigned_group_ids))
+ for group in assigned_group_ids:
+ self.assertIn(group['id'], group_ids)
def test_assertion_prefix_parameter(self):
"""Test parameters filtering based on the prefix.
@@ -2416,27 +2366,24 @@ class FernetFederatedTokenTests(FederationTests, FederatedSetupMixin):
super(FernetFederatedTokenTests, self).load_fixtures(fixtures)
self.load_federation_sample_data()
+ def config_overrides(self):
+ super(FernetFederatedTokenTests, self).config_overrides()
+ self.config_fixture.config(group='token', provider='fernet')
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
def auth_plugin_config_override(self):
methods = ['saml2', 'token', 'password']
- method_classes = dict(
- password='keystone.auth.plugins.password.Password',
- token='keystone.auth.plugins.token.Token',
- saml2='keystone.auth.plugins.saml2.Saml2')
super(FernetFederatedTokenTests,
- self).auth_plugin_config_override(methods, **method_classes)
- self.config_fixture.config(
- group='token',
- provider='keystone.token.providers.fernet.Provider')
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+ self).auth_plugin_config_override(methods)
def test_federated_unscoped_token(self):
resp = self._issue_unscoped_token()
- self.assertEqual(186, len(resp.headers['X-Subject-Token']))
+ self.assertEqual(204, len(resp.headers['X-Subject-Token']))
def test_federated_unscoped_token_with_multiple_groups(self):
assertion = 'ANOTHER_CUSTOMER_ASSERTION'
resp = self._issue_unscoped_token(assertion=assertion)
- self.assertEqual(204, len(resp.headers['X-Subject-Token']))
+ self.assertEqual(232, len(resp.headers['X-Subject-Token']))
def test_validate_federated_unscoped_token(self):
resp = self._issue_unscoped_token()
@@ -2481,11 +2428,8 @@ class FederatedTokenTestsMethodToken(FederatedTokenTests):
def auth_plugin_config_override(self):
methods = ['saml2', 'token']
- method_classes = dict(
- token='keystone.auth.plugins.token.Token',
- saml2='keystone.auth.plugins.saml2.Saml2')
super(FederatedTokenTests,
- self).auth_plugin_config_override(methods, **method_classes)
+ self).auth_plugin_config_override(methods)
class JsonHomeTests(FederationTests, test_v3.JsonHomeTestMixin):
@@ -2520,12 +2464,20 @@ class SAMLGenerationTests(FederationTests):
SP_AUTH_URL = ('http://beta.com:5000/v3/OS-FEDERATION/identity_providers'
'/BETA/protocols/saml2/auth')
+
+ ASSERTION_FILE = 'signed_saml2_assertion.xml'
+
+ # The values of the following variables match the attributes values found
+ # in ASSERTION_FILE
ISSUER = 'https://acme.com/FIM/sps/openstack/saml20'
RECIPIENT = 'http://beta.com/Shibboleth.sso/SAML2/POST'
SUBJECT = 'test_user'
+ SUBJECT_DOMAIN = 'user_domain'
ROLES = ['admin', 'member']
PROJECT = 'development'
+ PROJECT_DOMAIN = 'project_domain'
SAML_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2'
+ ECP_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2/ecp'
ASSERTION_VERSION = "2.0"
SERVICE_PROVDIER_ID = 'ACME'
@@ -2535,6 +2487,7 @@ class SAMLGenerationTests(FederationTests):
'enabled': True,
'description': uuid.uuid4().hex,
'sp_url': self.RECIPIENT,
+ 'relay_state_prefix': CONF.saml.relay_state_prefix,
}
return ref
@@ -2542,9 +2495,11 @@ class SAMLGenerationTests(FederationTests):
def setUp(self):
super(SAMLGenerationTests, self).setUp()
self.signed_assertion = saml2.create_class_from_xml_string(
- saml.Assertion, _load_xml('signed_saml2_assertion.xml'))
+ saml.Assertion, _load_xml(self.ASSERTION_FILE))
self.sp = self.sp_ref()
- self.federation_api.create_sp(self.SERVICE_PROVDIER_ID, self.sp)
+ url = '/OS-FEDERATION/service_providers/' + self.SERVICE_PROVDIER_ID
+ self.put(url, body={'service_provider': self.sp},
+ expected_status=201)
def test_samlize_token_values(self):
"""Test the SAML generator produces a SAML object.
@@ -2558,8 +2513,10 @@ class SAMLGenerationTests(FederationTests):
return_value=self.signed_assertion):
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT, self.ROLES,
- self.PROJECT)
+ self.SUBJECT,
+ self.SUBJECT_DOMAIN,
+ self.ROLES, self.PROJECT,
+ self.PROJECT_DOMAIN)
assertion = response.assertion
self.assertIsNotNone(assertion)
@@ -2571,14 +2528,24 @@ class SAMLGenerationTests(FederationTests):
user_attribute = assertion.attribute_statement[0].attribute[0]
self.assertEqual(self.SUBJECT, user_attribute.attribute_value[0].text)
- role_attribute = assertion.attribute_statement[0].attribute[1]
+ user_domain_attribute = (
+ assertion.attribute_statement[0].attribute[1])
+ self.assertEqual(self.SUBJECT_DOMAIN,
+ user_domain_attribute.attribute_value[0].text)
+
+ role_attribute = assertion.attribute_statement[0].attribute[2]
for attribute_value in role_attribute.attribute_value:
self.assertIn(attribute_value.text, self.ROLES)
- project_attribute = assertion.attribute_statement[0].attribute[2]
+ project_attribute = assertion.attribute_statement[0].attribute[3]
self.assertEqual(self.PROJECT,
project_attribute.attribute_value[0].text)
+ project_domain_attribute = (
+ assertion.attribute_statement[0].attribute[4])
+ self.assertEqual(self.PROJECT_DOMAIN,
+ project_domain_attribute.attribute_value[0].text)
+
def test_verify_assertion_object(self):
"""Test that the Assertion object is built properly.
@@ -2590,8 +2557,10 @@ class SAMLGenerationTests(FederationTests):
side_effect=lambda x: x):
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT, self.ROLES,
- self.PROJECT)
+ self.SUBJECT,
+ self.SUBJECT_DOMAIN,
+ self.ROLES, self.PROJECT,
+ self.PROJECT_DOMAIN)
assertion = response.assertion
self.assertEqual(self.ASSERTION_VERSION, assertion.version)
@@ -2607,8 +2576,10 @@ class SAMLGenerationTests(FederationTests):
return_value=self.signed_assertion):
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT, self.ROLES,
- self.PROJECT)
+ self.SUBJECT,
+ self.SUBJECT_DOMAIN,
+ self.ROLES, self.PROJECT,
+ self.PROJECT_DOMAIN)
saml_str = response.to_string()
response = etree.fromstring(saml_str)
@@ -2621,13 +2592,19 @@ class SAMLGenerationTests(FederationTests):
user_attribute = assertion[4][0]
self.assertEqual(self.SUBJECT, user_attribute[0].text)
- role_attribute = assertion[4][1]
+ user_domain_attribute = assertion[4][1]
+ self.assertEqual(self.SUBJECT_DOMAIN, user_domain_attribute[0].text)
+
+ role_attribute = assertion[4][2]
for attribute_value in role_attribute:
self.assertIn(attribute_value.text, self.ROLES)
- project_attribute = assertion[4][2]
+ project_attribute = assertion[4][3]
self.assertEqual(self.PROJECT, project_attribute[0].text)
+ project_domain_attribute = assertion[4][4]
+ self.assertEqual(self.PROJECT_DOMAIN, project_domain_attribute[0].text)
+
def test_assertion_using_explicit_namespace_prefixes(self):
def mocked_subprocess_check_output(*popenargs, **kwargs):
# the last option is the assertion file to be signed
@@ -2642,8 +2619,10 @@ class SAMLGenerationTests(FederationTests):
side_effect=mocked_subprocess_check_output):
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT, self.ROLES,
- self.PROJECT)
+ self.SUBJECT,
+ self.SUBJECT_DOMAIN,
+ self.ROLES, self.PROJECT,
+ self.PROJECT_DOMAIN)
assertion_xml = response.assertion.to_string()
# make sure we have the proper tag and prefix for the assertion
# namespace
@@ -2666,8 +2645,9 @@ class SAMLGenerationTests(FederationTests):
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT, self.ROLES,
- self.PROJECT)
+ self.SUBJECT, self.SUBJECT_DOMAIN,
+ self.ROLES, self.PROJECT,
+ self.PROJECT_DOMAIN)
signature = response.assertion.signature
self.assertIsNotNone(signature)
@@ -2770,12 +2750,18 @@ class SAMLGenerationTests(FederationTests):
user_attribute = assertion[4][0]
self.assertIsInstance(user_attribute[0].text, str)
- role_attribute = assertion[4][1]
+ user_domain_attribute = assertion[4][1]
+ self.assertIsInstance(user_domain_attribute[0].text, str)
+
+ role_attribute = assertion[4][2]
self.assertIsInstance(role_attribute[0].text, str)
- project_attribute = assertion[4][2]
+ project_attribute = assertion[4][3]
self.assertIsInstance(project_attribute[0].text, str)
+ project_domain_attribute = assertion[4][4]
+ self.assertIsInstance(project_domain_attribute[0].text, str)
+
def test_invalid_scope_body(self):
"""Test that missing the scope in request body raises an exception.
@@ -2839,6 +2825,104 @@ class SAMLGenerationTests(FederationTests):
self.SERVICE_PROVDIER_ID)
self.post(self.SAML_GENERATION_ROUTE, body=body, expected_status=404)
+ def test_generate_ecp_route(self):
+ """Test that the ECP generation endpoint produces XML.
+
+ The ECP endpoint /v3/auth/OS-FEDERATION/saml2/ecp should take the same
+ input as the SAML generation endpoint (scoped token ID + Service
+ Provider ID).
+ The controller should return a SAML assertion that is wrapped in a
+ SOAP envelope.
+ """
+
+ self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
+ token_id = self._fetch_valid_token()
+ body = self._create_generate_saml_request(token_id,
+ self.SERVICE_PROVDIER_ID)
+
+ with mock.patch.object(keystone_idp, '_sign_assertion',
+ return_value=self.signed_assertion):
+ http_response = self.post(self.ECP_GENERATION_ROUTE, body=body,
+ response_content_type='text/xml',
+ expected_status=200)
+
+ env_response = etree.fromstring(http_response.result)
+ header = env_response[0]
+
+ # Verify the relay state starts with 'ss:mem'
+ prefix = CONF.saml.relay_state_prefix
+ self.assertThat(header[0].text, matchers.StartsWith(prefix))
+
+ # Verify that the content in the body matches the expected assertion
+ body = env_response[1]
+ response = body[0]
+ issuer = response[0]
+ assertion = response[2]
+
+ self.assertEqual(self.RECIPIENT, response.get('Destination'))
+ self.assertEqual(self.ISSUER, issuer.text)
+
+ user_attribute = assertion[4][0]
+ self.assertIsInstance(user_attribute[0].text, str)
+
+ user_domain_attribute = assertion[4][1]
+ self.assertIsInstance(user_domain_attribute[0].text, str)
+
+ role_attribute = assertion[4][2]
+ self.assertIsInstance(role_attribute[0].text, str)
+
+ project_attribute = assertion[4][3]
+ self.assertIsInstance(project_attribute[0].text, str)
+
+ project_domain_attribute = assertion[4][4]
+ self.assertIsInstance(project_domain_attribute[0].text, str)
+
+ @mock.patch('saml2.create_class_from_xml_string')
+ @mock.patch('oslo_utils.fileutils.write_to_tempfile')
+ @mock.patch('subprocess.check_output')
+ def test__sign_assertion(self, check_output_mock,
+ write_to_tempfile_mock, create_class_mock):
+ write_to_tempfile_mock.return_value = 'tmp_path'
+ check_output_mock.return_value = 'fakeoutput'
+
+ keystone_idp._sign_assertion(self.signed_assertion)
+
+ create_class_mock.assert_called_with(saml.Assertion, 'fakeoutput')
+
+ @mock.patch('oslo_utils.fileutils.write_to_tempfile')
+ @mock.patch('subprocess.check_output')
+ def test__sign_assertion_exc(self, check_output_mock,
+ write_to_tempfile_mock):
+ # If the command fails the command output is logged.
+
+ write_to_tempfile_mock.return_value = 'tmp_path'
+
+ sample_returncode = 1
+ sample_output = self.getUniqueString()
+ check_output_mock.side_effect = subprocess.CalledProcessError(
+ returncode=sample_returncode, cmd=CONF.saml.xmlsec1_binary,
+ output=sample_output)
+
+ # FIXME(blk-u): This should raise exception.SAMLSigningError instead,
+ # but fails with TypeError due to concatenating string to Message, see
+ # bug 1484735.
+ self.assertRaises(TypeError,
+ keystone_idp._sign_assertion,
+ self.signed_assertion)
+
+ @mock.patch('oslo_utils.fileutils.write_to_tempfile')
+ def test__sign_assertion_fileutils_exc(self, write_to_tempfile_mock):
+ exception_msg = 'fake'
+ write_to_tempfile_mock.side_effect = Exception(exception_msg)
+
+ logger_fixture = self.useFixture(fixtures.LoggerFixture())
+ self.assertRaises(exception.SAMLSigningError,
+ keystone_idp._sign_assertion,
+ self.signed_assertion)
+ expected_log = (
+ 'Error when signing assertion, reason: %s\n' % exception_msg)
+ self.assertEqual(expected_log, logger_fixture.output)
+
class IdPMetadataGenerationTests(FederationTests):
"""A class for testing Identity Provider Metadata generation."""
@@ -2976,7 +3060,8 @@ class ServiceProviderTests(FederationTests):
MEMBER_NAME = 'service_provider'
COLLECTION_NAME = 'service_providers'
SERVICE_PROVIDER_ID = 'ACME'
- SP_KEYS = ['auth_url', 'id', 'enabled', 'description', 'sp_url']
+ SP_KEYS = ['auth_url', 'id', 'enabled', 'description',
+ 'relay_state_prefix', 'sp_url']
def setUp(self):
super(FederationTests, self).setUp()
@@ -2993,6 +3078,7 @@ class ServiceProviderTests(FederationTests):
'enabled': True,
'description': uuid.uuid4().hex,
'sp_url': 'https://' + uuid.uuid4().hex + '.com',
+ 'relay_state_prefix': CONF.saml.relay_state_prefix
}
return ref
@@ -3019,6 +3105,29 @@ class ServiceProviderTests(FederationTests):
self.assertValidEntity(resp.result['service_provider'],
keys_to_check=self.SP_KEYS)
+ def test_create_sp_relay_state_default(self):
+ """Create an SP without relay state, should default to `ss:mem`."""
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ sp = self.sp_ref()
+ del sp['relay_state_prefix']
+ resp = self.put(url, body={'service_provider': sp},
+ expected_status=201)
+ sp_result = resp.result['service_provider']
+ self.assertEqual(CONF.saml.relay_state_prefix,
+ sp_result['relay_state_prefix'])
+
+ def test_create_sp_relay_state_non_default(self):
+ """Create an SP with custom relay state."""
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ sp = self.sp_ref()
+ non_default_prefix = uuid.uuid4().hex
+ sp['relay_state_prefix'] = non_default_prefix
+ resp = self.put(url, body={'service_provider': sp},
+ expected_status=201)
+ sp_result = resp.result['service_provider']
+ self.assertEqual(non_default_prefix,
+ sp_result['relay_state_prefix'])
+
def test_create_service_provider_fail(self):
"""Try adding SP object with unallowed attribute."""
url = self.base_url(suffix=uuid.uuid4().hex)
@@ -3108,6 +3217,18 @@ class ServiceProviderTests(FederationTests):
self.patch(url, body={'service_provider': new_sp_ref},
expected_status=404)
+ def test_update_sp_relay_state(self):
+ """Update an SP with custome relay state."""
+ new_sp_ref = self.sp_ref()
+ non_default_prefix = uuid.uuid4().hex
+ new_sp_ref['relay_state_prefix'] = non_default_prefix
+ url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
+ resp = self.patch(url, body={'service_provider': new_sp_ref},
+ expected_status=200)
+ sp_result = resp.result['service_provider']
+ self.assertEqual(non_default_prefix,
+ sp_result['relay_state_prefix'])
+
def test_delete_service_provider(self):
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
self.delete(url, expected_status=204)
@@ -3125,6 +3246,7 @@ class WebSSOTests(FederatedTokenTests):
SSO_TEMPLATE_PATH = os.path.join(core.dirs.etc(), SSO_TEMPLATE_NAME)
TRUSTED_DASHBOARD = 'http://horizon.com'
ORIGIN = urllib.parse.quote_plus(TRUSTED_DASHBOARD)
+ PROTOCOL_REMOTE_ID_ATTR = uuid.uuid4().hex
def setUp(self):
super(WebSSOTests, self).setUp()
@@ -3145,7 +3267,19 @@ class WebSSOTests(FederatedTokenTests):
self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
def test_federated_sso_auth(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_ID}
+ environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
+ context = {'environment': environment}
+ query_string = {'origin': self.ORIGIN}
+ self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
+ resp = self.api.federated_sso_auth(context, self.PROTOCOL)
+ self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
+
+ def test_federated_sso_auth_with_protocol_specific_remote_id(self):
+ self.config_fixture.config(
+ group=self.PROTOCOL,
+ remote_id_attribute=self.PROTOCOL_REMOTE_ID_ATTR)
+
+ environment = {self.PROTOCOL_REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
context = {'environment': environment}
query_string = {'origin': self.ORIGIN}
self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
@@ -3162,7 +3296,7 @@ class WebSSOTests(FederatedTokenTests):
context, self.PROTOCOL)
def test_federated_sso_missing_query(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_ID}
+ environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
context = {'environment': environment}
self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
self.assertRaises(exception.ValidationError,
@@ -3178,7 +3312,7 @@ class WebSSOTests(FederatedTokenTests):
context, self.PROTOCOL)
def test_federated_sso_untrusted_dashboard(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_ID}
+ environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
context = {'environment': environment}
query_string = {'origin': uuid.uuid4().hex}
self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
@@ -3229,6 +3363,7 @@ class K2KServiceCatalogTests(FederationTests):
def sp_response(self, id, ref):
ref.pop('enabled')
ref.pop('description')
+ ref.pop('relay_state_prefix')
ref['id'] = id
return ref
@@ -3238,6 +3373,7 @@ class K2KServiceCatalogTests(FederationTests):
'enabled': True,
'description': uuid.uuid4().hex,
'sp_url': uuid.uuid4().hex,
+ 'relay_state_prefix': CONF.saml.relay_state_prefix,
}
return ref
diff --git a/keystone-moon/keystone/tests/unit/test_v3_filters.py b/keystone-moon/keystone/tests/unit/test_v3_filters.py
index 4ad44657..668a2308 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_filters.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_filters.py
@@ -17,6 +17,7 @@ import uuid
from oslo_config import cfg
from oslo_serialization import jsonutils
+from six.moves import range
from keystone.tests.unit import filtering
from keystone.tests.unit.ksfixtures import temporaryfile
@@ -331,12 +332,6 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
super(IdentityTestListLimitCase, self).setUp()
- self._set_policy({"identity:list_users": [],
- "identity:list_groups": [],
- "identity:list_projects": [],
- "identity:list_services": [],
- "identity:list_policies": []})
-
# Create 10 entries for each of the entities we are going to test
self.ENTITY_TYPES = ['user', 'group', 'project']
self.entity_lists = {}
@@ -398,6 +393,7 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
else:
plural = '%ss' % entity
+ self._set_policy({"identity:list_%s" % plural: []})
self.config_fixture.config(list_limit=5)
self.config_fixture.config(group=driver, list_limit=None)
r = self.get('/%s' % plural, auth=self.auth)
@@ -435,6 +431,7 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
def test_no_limit(self):
"""Check truncated attribute not set when list not limited."""
+ self._set_policy({"identity:list_services": []})
r = self.get('/services', auth=self.auth)
self.assertEqual(10, len(r.result.get('services')))
self.assertIsNone(r.result.get('truncated'))
@@ -445,6 +442,7 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
# Test this by overriding the general limit with a higher
# driver-specific limit (allowing all entities to be returned
# in the collection), which should result in a non truncated list
+ self._set_policy({"identity:list_services": []})
self.config_fixture.config(list_limit=5)
self.config_fixture.config(group='catalog', list_limit=10)
r = self.get('/services', auth=self.auth)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_identity.py b/keystone-moon/keystone/tests/unit/test_v3_identity.py
index ac077297..e0090829 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_identity.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_identity.py
@@ -12,8 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
+import logging
import uuid
+import fixtures
from oslo_config import cfg
from testtools import matchers
@@ -434,6 +436,38 @@ class IdentityTestCase(test_v3.RestfulTestCase):
self.delete('/groups/%(group_id)s' % {
'group_id': self.group_id})
+ def test_create_user_password_not_logged(self):
+ # When a user is created, the password isn't logged at any level.
+
+ log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+
+ ref = self.new_user_ref(domain_id=self.domain_id)
+ self.post(
+ '/users',
+ body={'user': ref})
+
+ self.assertNotIn(ref['password'], log_fix.output)
+
+ def test_update_password_not_logged(self):
+ # When admin modifies user password, the password isn't logged at any
+ # level.
+
+ log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+
+ # bootstrap a user as admin
+ user_ref = self.new_user_ref(domain_id=self.domain['id'])
+ password = user_ref['password']
+ user_ref = self.identity_api.create_user(user_ref)
+
+ # administrative password reset
+ new_password = uuid.uuid4().hex
+ self.patch('/users/%s' % user_ref['id'],
+ body={'user': {'password': new_password}},
+ expected_status=200)
+
+ self.assertNotIn(password, log_fix.output)
+ self.assertNotIn(new_password, log_fix.output)
+
class IdentityV3toV2MethodsTestCase(tests.TestCase):
"""Test users V3 to V2 conversion methods."""
@@ -444,27 +478,26 @@ class IdentityV3toV2MethodsTestCase(tests.TestCase):
self.user_id = uuid.uuid4().hex
self.default_project_id = uuid.uuid4().hex
self.tenant_id = uuid.uuid4().hex
- self.domain_id = uuid.uuid4().hex
# User with only default_project_id in ref
self.user1 = {'id': self.user_id,
'name': self.user_id,
'default_project_id': self.default_project_id,
- 'domain_id': self.domain_id}
+ 'domain_id': CONF.identity.default_domain_id}
# User without default_project_id or tenantId in ref
self.user2 = {'id': self.user_id,
'name': self.user_id,
- 'domain_id': self.domain_id}
+ 'domain_id': CONF.identity.default_domain_id}
# User with both tenantId and default_project_id in ref
self.user3 = {'id': self.user_id,
'name': self.user_id,
'default_project_id': self.default_project_id,
'tenantId': self.tenant_id,
- 'domain_id': self.domain_id}
+ 'domain_id': CONF.identity.default_domain_id}
# User with only tenantId in ref
self.user4 = {'id': self.user_id,
'name': self.user_id,
'tenantId': self.tenant_id,
- 'domain_id': self.domain_id}
+ 'domain_id': CONF.identity.default_domain_id}
# Expected result if the user is meant to have a tenantId element
self.expected_user = {'id': self.user_id,
@@ -582,3 +615,18 @@ class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
self.change_password(password=uuid.uuid4().hex,
original_password=self.user_ref['password'],
expected_status=401)
+
+ def test_changing_password_not_logged(self):
+ # When a user changes their password, the password isn't logged at any
+ # level.
+
+ log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+
+ # change password
+ new_password = uuid.uuid4().hex
+ self.change_password(password=new_password,
+ original_password=self.user_ref['password'],
+ expected_status=204)
+
+ self.assertNotIn(self.user_ref['password'], log_fix.output)
+ self.assertNotIn(new_password, log_fix.output)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
index 608162d8..6c063c5e 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
@@ -241,16 +241,6 @@ class ConsumerCRUDTests(OAuth1Tests):
class OAuthFlowTests(OAuth1Tests):
- def auth_plugin_config_override(self):
- methods = ['password', 'token', 'oauth1']
- method_classes = {
- 'password': 'keystone.auth.plugins.password.Password',
- 'token': 'keystone.auth.plugins.token.Token',
- 'oauth1': 'keystone.auth.plugins.oauth1.OAuth',
- }
- super(OAuthFlowTests, self).auth_plugin_config_override(
- methods, **method_classes)
-
def test_oauth_flow(self):
consumer = self._create_single_consumer()
consumer_id = consumer['id']
diff --git a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
index 5710d973..48226cd4 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
@@ -17,6 +17,7 @@ from oslo_utils import timeutils
import six
from testtools import matchers
+from keystone.common import utils
from keystone.contrib.revoke import model
from keystone.tests.unit import test_v3
from keystone.token import provider
@@ -25,7 +26,7 @@ from keystone.token import provider
def _future_time_string():
expire_delta = datetime.timedelta(seconds=1000)
future_time = timeutils.utcnow() + expire_delta
- return timeutils.isotime(future_time)
+ return utils.isotime(future_time)
class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
@@ -55,13 +56,13 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
self.assertTrue(
before_time <= event_issued_before,
'invalid event issued_before time; %s is not later than %s.' % (
- timeutils.isotime(event_issued_before, subsecond=True),
- timeutils.isotime(before_time, subsecond=True)))
+ utils.isotime(event_issued_before, subsecond=True),
+ utils.isotime(before_time, subsecond=True)))
self.assertTrue(
event_issued_before <= after_time,
'invalid event issued_before time; %s is not earlier than %s.' % (
- timeutils.isotime(event_issued_before, subsecond=True),
- timeutils.isotime(after_time, subsecond=True)))
+ utils.isotime(event_issued_before, subsecond=True),
+ utils.isotime(after_time, subsecond=True)))
del (event['issued_before'])
self.assertEqual(sample, event)
@@ -76,7 +77,7 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
expires_at = provider.default_expire_time()
sample = self._blank_event()
sample['user_id'] = six.text_type(user_id)
- sample['expires_at'] = six.text_type(timeutils.isotime(expires_at))
+ sample['expires_at'] = six.text_type(utils.isotime(expires_at))
before_time = timeutils.utcnow()
self.revoke_api.revoke_by_expiration(user_id, expires_at)
resp = self.get('/OS-REVOKE/events')
diff --git a/keystone-moon/keystone/tests/unit/test_v3_protection.py b/keystone-moon/keystone/tests/unit/test_v3_protection.py
index 2b2c96d1..458c61de 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_protection.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_protection.py
@@ -391,23 +391,18 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
# Given a non-admin user token, the token can be used to validate
# itself.
# This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
- # FIXME(blk-u): This test fails, a user can't validate their own token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
- # FIXME(blk-u): remove expected_status=403.
self.get('/auth/tokens', token=token,
- headers={'X-Subject-Token': token}, expected_status=403)
+ headers={'X-Subject-Token': token})
def test_user_validate_user_token(self):
# A user can validate one of their own tokens.
# This is GET /v3/auth/tokens
- # FIXME(blk-u): This test fails, a user can't validate their own token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -415,9 +410,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
- # FIXME(blk-u): remove expected_status=403.
self.get('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2}, expected_status=403)
+ headers={'X-Subject-Token': token2})
def test_user_validate_other_user_token_rejected(self):
# A user cannot validate another user's token.
@@ -458,23 +452,18 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
# Given a non-admin user token, the token can be used to check
# itself.
# This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
- # FIXME(blk-u): This test fails, a user can't check the same token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
- # FIXME(blk-u): change to expected_status=200
self.head('/auth/tokens', token=token,
- headers={'X-Subject-Token': token}, expected_status=403)
+ headers={'X-Subject-Token': token}, expected_status=200)
def test_user_check_user_token(self):
# A user can check one of their own tokens.
# This is HEAD /v3/auth/tokens
- # FIXME(blk-u): This test fails, a user can't check the same token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -482,9 +471,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
- # FIXME(blk-u): change to expected_status=200
self.head('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2}, expected_status=403)
+ headers={'X-Subject-Token': token2}, expected_status=200)
def test_user_check_other_user_token_rejected(self):
# A user cannot check another user's token.
@@ -526,23 +514,18 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
# Given a non-admin user token, the token can be used to revoke
# itself.
# This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
- # FIXME(blk-u): This test fails, a user can't revoke the same token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
- # FIXME(blk-u): remove expected_status=403
self.delete('/auth/tokens', token=token,
- headers={'X-Subject-Token': token}, expected_status=403)
+ headers={'X-Subject-Token': token})
def test_user_revoke_user_token(self):
# A user can revoke one of their own tokens.
# This is DELETE /v3/auth/tokens
- # FIXME(blk-u): This test fails, a user can't revoke the same token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -550,9 +533,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
- # FIXME(blk-u): remove expected_status=403
self.delete('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2}, expected_status=403)
+ headers={'X-Subject-Token': token2})
def test_user_revoke_other_user_token_rejected(self):
# A user cannot revoke another user's token.
@@ -591,7 +573,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
headers={'X-Subject-Token': user_token})
-class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
+class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
+ test_v3.AssignmentTestMixin):
"""Test policy enforcement of the sample v3 cloud policy file."""
def setUp(self):
@@ -905,6 +888,141 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
self._test_grants('projects', self.project['id'])
+ def test_cloud_admin_list_assignments_of_domain(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ domain_id=self.domainA['id'])
+ r = self.get(collection_url, auth=self.auth)
+ self.assertValidRoleAssignmentListResponse(
+ r, expected_length=2, resource_url=collection_url)
+
+ domainA_admin_entity = self.build_role_assignment_entity(
+ domain_id=self.domainA['id'],
+ user_id=self.domain_admin_user['id'],
+ role_id=self.admin_role['id'],
+ inherited_to_projects=False)
+ domainA_user_entity = self.build_role_assignment_entity(
+ domain_id=self.domainA['id'],
+ user_id=self.just_a_user['id'],
+ role_id=self.role['id'],
+ inherited_to_projects=False)
+
+ self.assertRoleAssignmentInListResponse(r, domainA_admin_entity)
+ self.assertRoleAssignmentInListResponse(r, domainA_user_entity)
+
+ def test_domain_admin_list_assignments_of_domain(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ domain_id=self.domainA['id'])
+ r = self.get(collection_url, auth=self.auth)
+ self.assertValidRoleAssignmentListResponse(
+ r, expected_length=2, resource_url=collection_url)
+
+ domainA_admin_entity = self.build_role_assignment_entity(
+ domain_id=self.domainA['id'],
+ user_id=self.domain_admin_user['id'],
+ role_id=self.admin_role['id'],
+ inherited_to_projects=False)
+ domainA_user_entity = self.build_role_assignment_entity(
+ domain_id=self.domainA['id'],
+ user_id=self.just_a_user['id'],
+ role_id=self.role['id'],
+ inherited_to_projects=False)
+
+ self.assertRoleAssignmentInListResponse(r, domainA_admin_entity)
+ self.assertRoleAssignmentInListResponse(r, domainA_user_entity)
+
+ def test_domain_admin_list_assignments_of_another_domain_failed(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ domain_id=self.domainB['id'])
+ self.get(collection_url, auth=self.auth, expected_status=403)
+
+ def test_domain_user_list_assignments_of_domain_failed(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ domain_id=self.domainA['id'])
+ self.get(collection_url, auth=self.auth, expected_status=403)
+
+ def test_cloud_admin_list_assignments_of_project(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ domain_id=self.admin_domain['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ project_id=self.project['id'])
+ r = self.get(collection_url, auth=self.auth)
+ self.assertValidRoleAssignmentListResponse(
+ r, expected_length=2, resource_url=collection_url)
+
+ project_admin_entity = self.build_role_assignment_entity(
+ project_id=self.project['id'],
+ user_id=self.project_admin_user['id'],
+ role_id=self.admin_role['id'],
+ inherited_to_projects=False)
+ project_user_entity = self.build_role_assignment_entity(
+ project_id=self.project['id'],
+ user_id=self.just_a_user['id'],
+ role_id=self.role['id'],
+ inherited_to_projects=False)
+
+ self.assertRoleAssignmentInListResponse(r, project_admin_entity)
+ self.assertRoleAssignmentInListResponse(r, project_user_entity)
+
+ @tests.utils.wip('waiting on bug #1437407')
+ def test_domain_admin_list_assignments_of_project(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ project_id=self.project['id'])
+ r = self.get(collection_url, auth=self.auth)
+ self.assertValidRoleAssignmentListResponse(
+ r, expected_length=2, resource_url=collection_url)
+
+ project_admin_entity = self.build_role_assignment_entity(
+ project_id=self.project['id'],
+ user_id=self.project_admin_user['id'],
+ role_id=self.admin_role['id'],
+ inherited_to_projects=False)
+ project_user_entity = self.build_role_assignment_entity(
+ project_id=self.project['id'],
+ user_id=self.just_a_user['id'],
+ role_id=self.role['id'],
+ inherited_to_projects=False)
+
+ self.assertRoleAssignmentInListResponse(r, project_admin_entity)
+ self.assertRoleAssignmentInListResponse(r, project_user_entity)
+
+ def test_domain_user_list_assignments_of_project_failed(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ project_id=self.project['id'])
+ self.get(collection_url, auth=self.auth, expected_status=403)
+
def test_cloud_admin(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
@@ -921,6 +1039,14 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
self._test_domain_management()
+ def test_domain_admin_get_domain(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+ entity_url = '/domains/%s' % self.domainA['id']
+ self.get(entity_url, auth=self.auth, expected_status=200)
+
def test_list_user_credentials(self):
self.credential_user = self.new_credential_ref(self.just_a_user['id'])
self.credential_api.create_credential(self.credential_user['id'],
@@ -982,23 +1108,18 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
# Given a non-admin user token, the token can be used to validate
# itself.
# This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
- # FIXME(blk-u): This test fails, a user can't validate their own token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
- # FIXME(blk-u): remove expected_status=403.
self.get('/auth/tokens', token=token,
- headers={'X-Subject-Token': token}, expected_status=403)
+ headers={'X-Subject-Token': token})
def test_user_validate_user_token(self):
# A user can validate one of their own tokens.
# This is GET /v3/auth/tokens
- # FIXME(blk-u): This test fails, a user can't validate their own token,
- # see bug 1421825.
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -1006,9 +1127,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
- # FIXME(blk-u): remove expected_status=403.
self.get('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2}, expected_status=403)
+ headers={'X-Subject-Token': token2})
def test_user_validate_other_user_token_rejected(self):
# A user cannot validate another user's token.
diff --git a/keystone-moon/keystone/tests/unit/test_validation.py b/keystone-moon/keystone/tests/unit/test_validation.py
index f83cabcb..f7a224a0 100644
--- a/keystone-moon/keystone/tests/unit/test_validation.py
+++ b/keystone-moon/keystone/tests/unit/test_validation.py
@@ -13,6 +13,7 @@
import uuid
+import six
import testtools
from keystone.assignment import schema as assignment_schema
@@ -24,8 +25,10 @@ from keystone.contrib.endpoint_filter import schema as endpoint_filter_schema
from keystone.contrib.federation import schema as federation_schema
from keystone.credential import schema as credential_schema
from keystone import exception
+from keystone.identity import schema as identity_schema
from keystone.policy import schema as policy_schema
from keystone.resource import schema as resource_schema
+from keystone.tests import unit
from keystone.trust import schema as trust_schema
"""Example model to validate create requests against. Assume that this is
@@ -96,7 +99,84 @@ _VALID_FILTERS = [{'interface': 'admin'},
_INVALID_FILTERS = ['some string', 1, 0, True, False]
-class EntityValidationTestCase(testtools.TestCase):
+class ValidatedDecoratorTests(unit.BaseTestCase):
+
+ entity_schema = {
+ 'type': 'object',
+ 'properties': {
+ 'name': parameter_types.name,
+ },
+ 'required': ['name'],
+ }
+
+ valid_entity = {
+ 'name': uuid.uuid4().hex,
+ }
+
+ invalid_entity = {}
+
+ @validation.validated(entity_schema, 'entity')
+ def do_something(self, entity):
+ pass
+
+ @validation.validated(entity_create, 'entity')
+ def create_entity(self, entity):
+ pass
+
+ @validation.validated(entity_update, 'entity')
+ def update_entity(self, entity_id, entity):
+ pass
+
+ def _assert_call_entity_method_fails(self, method, *args, **kwargs):
+ e = self.assertRaises(exception.ValidationError, method,
+ *args, **kwargs)
+
+ self.assertIn('Expecting to find entity in request body',
+ six.text_type(e))
+
+ def test_calling_with_valid_entity_kwarg_succeeds(self):
+ self.do_something(entity=self.valid_entity)
+
+ def test_calling_with_invalid_entity_kwarg_fails(self):
+ self.assertRaises(exception.ValidationError,
+ self.do_something,
+ entity=self.invalid_entity)
+
+ def test_calling_with_valid_entity_arg_succeeds(self):
+ self.do_something(self.valid_entity)
+
+ def test_calling_with_invalid_entity_arg_fails(self):
+ self.assertRaises(exception.ValidationError,
+ self.do_something,
+ self.invalid_entity)
+
+ def test_using_the_wrong_name_with_the_decorator_fails(self):
+ with testtools.ExpectedException(TypeError):
+ @validation.validated(self.entity_schema, 'entity_')
+ def function(entity):
+ pass
+
+ def test_create_entity_no_request_body_with_decorator(self):
+ """Test the case when request body is not provided."""
+ self._assert_call_entity_method_fails(self.create_entity)
+
+ def test_create_entity_empty_request_body_with_decorator(self):
+ """Test the case when client passing in an empty entity reference."""
+ self._assert_call_entity_method_fails(self.create_entity, entity={})
+
+ def test_update_entity_no_request_body_with_decorator(self):
+ """Test the case when request body is not provided."""
+ self._assert_call_entity_method_fails(self.update_entity,
+ uuid.uuid4().hex)
+
+ def test_update_entity_empty_request_body_with_decorator(self):
+ """Test the case when client passing in an empty entity reference."""
+ self._assert_call_entity_method_fails(self.update_entity,
+ uuid.uuid4().hex,
+ entity={})
+
+
+class EntityValidationTestCase(unit.BaseTestCase):
def setUp(self):
super(EntityValidationTestCase, self).setUp()
@@ -226,7 +306,7 @@ class EntityValidationTestCase(testtools.TestCase):
def test_create_entity_with_invalid_id_strings(self):
"""Exception raised when using invalid id strings."""
long_string = 'A' * 65
- invalid_id_strings = ['', long_string, 'this,should,fail']
+ invalid_id_strings = ['', long_string]
for invalid_id in invalid_id_strings:
request_to_validate = {'name': self.resource_name,
'id_string': invalid_id}
@@ -299,7 +379,7 @@ class EntityValidationTestCase(testtools.TestCase):
request_to_validate)
-class ProjectValidationTestCase(testtools.TestCase):
+class ProjectValidationTestCase(unit.BaseTestCase):
"""Test for V3 Project API validation."""
def setUp(self):
@@ -426,7 +506,7 @@ class ProjectValidationTestCase(testtools.TestCase):
request_to_validate)
-class DomainValidationTestCase(testtools.TestCase):
+class DomainValidationTestCase(unit.BaseTestCase):
"""Test for V3 Domain API validation."""
def setUp(self):
@@ -524,7 +604,7 @@ class DomainValidationTestCase(testtools.TestCase):
request_to_validate)
-class RoleValidationTestCase(testtools.TestCase):
+class RoleValidationTestCase(unit.BaseTestCase):
"""Test for V3 Role API validation."""
def setUp(self):
@@ -578,7 +658,7 @@ class RoleValidationTestCase(testtools.TestCase):
request_to_validate)
-class PolicyValidationTestCase(testtools.TestCase):
+class PolicyValidationTestCase(unit.BaseTestCase):
"""Test for V3 Policy API validation."""
def setUp(self):
@@ -653,7 +733,7 @@ class PolicyValidationTestCase(testtools.TestCase):
request_to_validate)
-class CredentialValidationTestCase(testtools.TestCase):
+class CredentialValidationTestCase(unit.BaseTestCase):
"""Test for V3 Credential API validation."""
def setUp(self):
@@ -770,7 +850,7 @@ class CredentialValidationTestCase(testtools.TestCase):
self.update_credential_validator.validate(request_to_validate)
-class RegionValidationTestCase(testtools.TestCase):
+class RegionValidationTestCase(unit.BaseTestCase):
"""Test for V3 Region API validation."""
def setUp(self):
@@ -804,6 +884,14 @@ class RegionValidationTestCase(testtools.TestCase):
'parent_region_id': uuid.uuid4().hex}
self.create_region_validator.validate(request_to_validate)
+ def test_validate_region_create_fails_with_invalid_region_id(self):
+ """Exception raised when passing invalid `id` in request."""
+ request_to_validate = {'id': 1234,
+ 'description': 'US East Region'}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_region_validator.validate,
+ request_to_validate)
+
def test_validate_region_create_succeeds_with_extra_parameters(self):
"""Validate create region request with extra values."""
request_to_validate = {'other_attr': uuid.uuid4().hex}
@@ -830,7 +918,7 @@ class RegionValidationTestCase(testtools.TestCase):
request_to_validate)
-class ServiceValidationTestCase(testtools.TestCase):
+class ServiceValidationTestCase(unit.BaseTestCase):
"""Test for V3 Service API validation."""
def setUp(self):
@@ -985,7 +1073,7 @@ class ServiceValidationTestCase(testtools.TestCase):
request_to_validate)
-class EndpointValidationTestCase(testtools.TestCase):
+class EndpointValidationTestCase(unit.BaseTestCase):
"""Test for V3 Endpoint API validation."""
def setUp(self):
@@ -1096,6 +1184,26 @@ class EndpointValidationTestCase(testtools.TestCase):
self.create_endpoint_validator.validate,
request_to_validate)
+ def test_validate_endpoint_create_fails_with_invalid_region_id(self):
+ """Exception raised when passing invalid `region(_id)` in request."""
+ request_to_validate = {'interface': 'admin',
+ 'region_id': 1234,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
+ request_to_validate = {'interface': 'admin',
+ 'region': 1234,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_endpoint_validator.validate,
+ request_to_validate)
+
def test_validate_endpoint_update_fails_with_invalid_enabled(self):
"""Exception raised when `enabled` is boolean-like value."""
for invalid_enabled in _INVALID_ENABLED_FORMATS:
@@ -1163,8 +1271,28 @@ class EndpointValidationTestCase(testtools.TestCase):
self.update_endpoint_validator.validate,
request_to_validate)
+ def test_validate_endpoint_update_fails_with_invalid_region_id(self):
+ """Exception raised when passing invalid `region(_id)` in request."""
+ request_to_validate = {'interface': 'admin',
+ 'region_id': 1234,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
-class EndpointGroupValidationTestCase(testtools.TestCase):
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_validator.validate,
+ request_to_validate)
+
+ request_to_validate = {'interface': 'admin',
+ 'region': 1234,
+ 'service_id': uuid.uuid4().hex,
+ 'url': 'https://service.example.com:5000/'}
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_endpoint_validator.validate,
+ request_to_validate)
+
+
+class EndpointGroupValidationTestCase(unit.BaseTestCase):
"""Test for V3 Endpoint Group API validation."""
def setUp(self):
@@ -1269,7 +1397,7 @@ class EndpointGroupValidationTestCase(testtools.TestCase):
request_to_validate)
-class TrustValidationTestCase(testtools.TestCase):
+class TrustValidationTestCase(unit.BaseTestCase):
"""Test for V3 Trust API validation."""
_valid_roles = ['member', uuid.uuid4().hex, str(uuid.uuid4())]
@@ -1360,6 +1488,13 @@ class TrustValidationTestCase(testtools.TestCase):
'remaining_uses': 2}
self.create_trust_validator.validate(request_to_validate)
+ def test_validate_trust_with_period_in_user_id_string(self):
+ """Validate trust request with a period in the user id string."""
+ request_to_validate = {'trustor_user_id': 'john.smith',
+ 'trustee_user_id': 'joe.developer',
+ 'impersonation': False}
+ self.create_trust_validator.validate(request_to_validate)
+
def test_validate_trust_with_invalid_expires_at_fails(self):
"""Validate trust request with invalid `expires_at` fails."""
request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
@@ -1399,7 +1534,7 @@ class TrustValidationTestCase(testtools.TestCase):
self.create_trust_validator.validate(request_to_validate)
-class ServiceProviderValidationTestCase(testtools.TestCase):
+class ServiceProviderValidationTestCase(unit.BaseTestCase):
"""Test for V3 Service Provider API validation."""
def setUp(self):
@@ -1561,3 +1696,182 @@ class ServiceProviderValidationTestCase(testtools.TestCase):
self.assertRaises(exception.SchemaValidationError,
self.update_sp_validator.validate,
request_to_validate)
+
+
+class UserValidationTestCase(unit.BaseTestCase):
+ """Test for V3 User API validation."""
+
+ def setUp(self):
+ super(UserValidationTestCase, self).setUp()
+
+ self.user_name = uuid.uuid4().hex
+
+ create = identity_schema.user_create
+ update = identity_schema.user_update
+ self.create_user_validator = validators.SchemaValidator(create)
+ self.update_user_validator = validators.SchemaValidator(update)
+
+ def test_validate_user_create_request_succeeds(self):
+ """Test that validating a user create request succeeds."""
+ request_to_validate = {'name': self.user_name}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_create_with_all_valid_parameters_succeeds(self):
+ """Test that validating a user create request succeeds."""
+ request_to_validate = {'name': self.user_name,
+ 'default_project_id': uuid.uuid4().hex,
+ 'domain_id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'email': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_create_fails_without_name(self):
+ """Exception raised when validating a user without name."""
+ request_to_validate = {'email': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_user_validator.validate,
+ request_to_validate)
+
+ def test_validate_user_create_fails_with_name_of_zero_length(self):
+ """Exception raised when validating a username with length of zero."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_user_validator.validate,
+ request_to_validate)
+
+ def test_validate_user_create_fails_with_name_of_wrong_type(self):
+ """Exception raised when validating a username of wrong type."""
+ request_to_validate = {'name': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_user_validator.validate,
+ request_to_validate)
+
+ def test_validate_user_create_succeeds_with_valid_enabled_formats(self):
+ """Validate acceptable enabled formats in create user requests."""
+ for enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.user_name,
+ 'enabled': enabled}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_create_fails_with_invalid_enabled_formats(self):
+ """Exception raised when enabled is not an acceptable format."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'name': self.user_name,
+ 'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_user_validator.validate,
+ request_to_validate)
+
+ def test_validate_user_create_succeeds_with_extra_attributes(self):
+ """Validate extra parameters on user create requests."""
+ request_to_validate = {'name': self.user_name,
+ 'other_attr': uuid.uuid4().hex}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_create_succeeds_with_password_of_zero_length(self):
+ """Validate empty password on user create requests."""
+ request_to_validate = {'name': self.user_name,
+ 'password': ''}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_create_succeeds_with_null_password(self):
+ """Validate that password is nullable on create user."""
+ request_to_validate = {'name': self.user_name,
+ 'password': None}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_create_fails_with_invalid_password_type(self):
+ """Exception raised when user password is of the wrong type."""
+ request_to_validate = {'name': self.user_name,
+ 'password': True}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_user_validator.validate,
+ request_to_validate)
+
+ def test_validate_user_create_succeeds_with_null_description(self):
+ """Validate that description can be nullable on create user."""
+ request_to_validate = {'name': self.user_name,
+ 'description': None}
+ self.create_user_validator.validate(request_to_validate)
+
+ def test_validate_user_update_succeeds(self):
+ """Validate an update user request."""
+ request_to_validate = {'email': uuid.uuid4().hex}
+ self.update_user_validator.validate(request_to_validate)
+
+ def test_validate_user_update_fails_with_no_parameters(self):
+ """Exception raised when updating nothing."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_user_validator.validate,
+ request_to_validate)
+
+ def test_validate_user_update_succeeds_with_extra_parameters(self):
+ """Validate user update requests with extra parameters."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex}
+ self.update_user_validator.validate(request_to_validate)
+
+
+class GroupValidationTestCase(unit.BaseTestCase):
+ """Test for V3 Group API validation."""
+
+ def setUp(self):
+ super(GroupValidationTestCase, self).setUp()
+
+ self.group_name = uuid.uuid4().hex
+
+ create = identity_schema.group_create
+ update = identity_schema.group_update
+ self.create_group_validator = validators.SchemaValidator(create)
+ self.update_group_validator = validators.SchemaValidator(update)
+
+ def test_validate_group_create_succeeds(self):
+ """Validate create group requests."""
+ request_to_validate = {'name': self.group_name}
+ self.create_group_validator.validate(request_to_validate)
+
+ def test_validate_group_create_succeeds_with_all_parameters(self):
+ """Validate create group requests with all parameters."""
+ request_to_validate = {'name': self.group_name,
+ 'description': uuid.uuid4().hex,
+ 'domain_id': uuid.uuid4().hex}
+ self.create_group_validator.validate(request_to_validate)
+
+ def test_validate_group_create_fails_without_group_name(self):
+ """Exception raised when group name is not provided in request."""
+ request_to_validate = {'description': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_group_validator.validate,
+ request_to_validate)
+
+ def test_validate_group_create_fails_when_group_name_is_too_short(self):
+ """Exception raised when group name is equal to zero."""
+ request_to_validate = {'name': ''}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_group_validator.validate,
+ request_to_validate)
+
+ def test_validate_group_create_succeeds_with_extra_parameters(self):
+ """Validate extra attributes on group create requests."""
+ request_to_validate = {'name': self.group_name,
+ 'other_attr': uuid.uuid4().hex}
+ self.create_group_validator.validate(request_to_validate)
+
+ def test_validate_group_update_succeeds(self):
+ """Validate group update requests."""
+ request_to_validate = {'description': uuid.uuid4().hex}
+ self.update_group_validator.validate(request_to_validate)
+
+ def test_validate_group_update_fails_with_no_parameters(self):
+ """Exception raised when no parameters passed in on update."""
+ request_to_validate = {}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_group_validator.validate,
+ request_to_validate)
+
+ def test_validate_group_update_succeeds_with_extra_parameters(self):
+ """Validate group update requests with extra parameters."""
+ request_to_validate = {'other_attr': uuid.uuid4().hex}
+ self.update_group_validator.validate(request_to_validate)
diff --git a/keystone-moon/keystone/tests/unit/test_versions.py b/keystone-moon/keystone/tests/unit/test_versions.py
index 6fe692ad..7f722f94 100644
--- a/keystone-moon/keystone/tests/unit/test_versions.py
+++ b/keystone-moon/keystone/tests/unit/test_versions.py
@@ -25,6 +25,7 @@ from testtools import matchers as tt_matchers
from keystone.common import json_home
from keystone import controllers
from keystone.tests import unit as tests
+from keystone.tests.unit import utils
CONF = cfg.CONF
@@ -71,9 +72,9 @@ v3_MEDIA_TYPES = [
]
v3_EXPECTED_RESPONSE = {
- "id": "v3.0",
+ "id": "v3.4",
"status": "stable",
- "updated": "2013-03-06T00:00:00Z",
+ "updated": "2015-03-30T00:00:00Z",
"links": [
{
"rel": "self",
@@ -161,7 +162,8 @@ ENDPOINT_GROUP_ID_PARAMETER_RELATION = (
BASE_IDP_PROTOCOL = '/OS-FEDERATION/identity_providers/{idp_id}/protocols'
BASE_EP_POLICY = '/policies/{policy_id}/OS-ENDPOINT-POLICY'
-BASE_EP_FILTER = '/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
+BASE_EP_FILTER_PREFIX = '/OS-EP-FILTER'
+BASE_EP_FILTER = BASE_EP_FILTER_PREFIX + '/endpoint_groups/{endpoint_group_id}'
BASE_ACCESS_TOKEN = (
'/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}')
@@ -352,6 +354,8 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
'href': '/OS-FEDERATION/projects'},
_build_federation_rel(resource_name='saml2'): {
'href': '/auth/OS-FEDERATION/saml2'},
+ _build_federation_rel(resource_name='ecp'): {
+ 'href': '/auth/OS-FEDERATION/saml2/ecp'},
_build_federation_rel(resource_name='metadata'): {
'href': '/OS-FEDERATION/saml2/metadata'},
_build_federation_rel(resource_name='identity_providers'): {
@@ -474,6 +478,12 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
'href-template': BASE_EP_FILTER + '/endpoints',
'href-vars': {'endpoint_group_id':
ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
+ _build_ep_filter_rel(resource_name='project_endpoint_groups'):
+ {
+ 'href-template': (BASE_EP_FILTER_PREFIX + '/projects/{project_id}' +
+ '/endpoint_groups'),
+ 'href-vars': {'project_id':
+ json_home.Parameters.PROJECT_ID, }},
_build_ep_filter_rel(resource_name='project_endpoint'):
{
'href-template': ('/OS-EP-FILTER/projects/{project_id}'
@@ -635,9 +645,11 @@ class VersionTestCase(tests.TestCase):
def config_overrides(self):
super(VersionTestCase, self).config_overrides()
- port = random.randint(10000, 30000)
- self.config_fixture.config(group='eventlet_server', public_port=port,
- admin_port=port)
+ admin_port = random.randint(10000, 30000)
+ public_port = random.randint(40000, 60000)
+ self.config_fixture.config(group='eventlet_server',
+ public_port=public_port,
+ admin_port=admin_port)
def _paste_in_port(self, response, port):
for link in response['links']:
@@ -651,7 +663,7 @@ class VersionTestCase(tests.TestCase):
data = jsonutils.loads(resp.body)
expected = VERSIONS_RESPONSE
for version in expected['versions']['values']:
- if version['id'] == 'v3.0':
+ if version['id'].startswith('v3'):
self._paste_in_port(
version, 'http://localhost:%s/v3/' %
CONF.eventlet_server.public_port)
@@ -668,7 +680,7 @@ class VersionTestCase(tests.TestCase):
data = jsonutils.loads(resp.body)
expected = VERSIONS_RESPONSE
for version in expected['versions']['values']:
- if version['id'] == 'v3.0':
+ if version['id'].startswith('v3'):
self._paste_in_port(
version, 'http://localhost:%s/v3/' %
CONF.eventlet_server.admin_port)
@@ -689,7 +701,7 @@ class VersionTestCase(tests.TestCase):
expected = VERSIONS_RESPONSE
for version in expected['versions']['values']:
# localhost happens to be the site url for tests
- if version['id'] == 'v3.0':
+ if version['id'].startswith('v3'):
self._paste_in_port(
version, 'http://localhost/v3/')
elif version['id'] == 'v2.0':
@@ -741,8 +753,9 @@ class VersionTestCase(tests.TestCase):
CONF.eventlet_server.public_port)
self.assertEqual(expected, data)
+ @utils.wip('waiting on bug #1381961')
def test_admin_version_v3(self):
- client = tests.TestClient(self.public_app)
+ client = tests.TestClient(self.admin_app)
resp = client.get('/v3/')
self.assertEqual(200, resp.status_int)
data = jsonutils.loads(resp.body)
@@ -931,9 +944,11 @@ class VersionSingleAppTestCase(tests.TestCase):
def config_overrides(self):
super(VersionSingleAppTestCase, self).config_overrides()
- port = random.randint(10000, 30000)
- self.config_fixture.config(group='eventlet_server', public_port=port,
- admin_port=port)
+ admin_port = random.randint(10000, 30000)
+ public_port = random.randint(40000, 60000)
+ self.config_fixture.config(group='eventlet_server',
+ public_port=public_port,
+ admin_port=admin_port)
def _paste_in_port(self, response, port):
for link in response['links']:
@@ -941,6 +956,11 @@ class VersionSingleAppTestCase(tests.TestCase):
link['href'] = port
def _test_version(self, app_name):
+ def app_port():
+ if app_name == 'admin':
+ return CONF.eventlet_server.admin_port
+ else:
+ return CONF.eventlet_server.public_port
app = self.loadapp('keystone', app_name)
client = tests.TestClient(app)
resp = client.get('/')
@@ -948,14 +968,12 @@ class VersionSingleAppTestCase(tests.TestCase):
data = jsonutils.loads(resp.body)
expected = VERSIONS_RESPONSE
for version in expected['versions']['values']:
- if version['id'] == 'v3.0':
+ if version['id'].startswith('v3'):
self._paste_in_port(
- version, 'http://localhost:%s/v3/' %
- CONF.eventlet_server.public_port)
+ version, 'http://localhost:%s/v3/' % app_port())
elif version['id'] == 'v2.0':
self._paste_in_port(
- version, 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.public_port)
+ version, 'http://localhost:%s/v2.0/' % app_port())
self.assertThat(data, _VersionsEqual(expected))
def test_public(self):
@@ -978,9 +996,11 @@ class VersionInheritEnabledTestCase(tests.TestCase):
def config_overrides(self):
super(VersionInheritEnabledTestCase, self).config_overrides()
- port = random.randint(10000, 30000)
- self.config_fixture.config(group='eventlet_server', public_port=port,
- admin_port=port)
+ admin_port = random.randint(10000, 30000)
+ public_port = random.randint(40000, 60000)
+ self.config_fixture.config(group='eventlet_server',
+ public_port=public_port,
+ admin_port=admin_port)
self.config_fixture.config(group='os_inherit', enabled=True)
@@ -1021,7 +1041,7 @@ class VersionBehindSslTestCase(tests.TestCase):
def _get_expected(self, host):
expected = VERSIONS_RESPONSE
for version in expected['versions']['values']:
- if version['id'] == 'v3.0':
+ if version['id'].startswith('v3'):
self._paste_in_port(version, host + 'v3/')
elif version['id'] == 'v2.0':
self._paste_in_port(version, host + 'v2.0/')
diff --git a/keystone-moon/keystone/tests/unit/test_wsgi.py b/keystone-moon/keystone/tests/unit/test_wsgi.py
index 1785dd00..62156bd5 100644
--- a/keystone-moon/keystone/tests/unit/test_wsgi.py
+++ b/keystone-moon/keystone/tests/unit/test_wsgi.py
@@ -1,3 +1,5 @@
+# encoding: utf-8
+#
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -16,6 +18,7 @@ import gettext
import socket
import uuid
+import eventlet
import mock
import oslo_i18n
from oslo_serialization import jsonutils
@@ -49,6 +52,22 @@ class FakeAttributeCheckerApp(wsgi.Application):
self._require_attributes(ref, attr)
+class RouterTest(tests.TestCase):
+ def setUp(self):
+ self.router = wsgi.RoutersBase()
+ super(RouterTest, self).setUp()
+
+ def test_invalid_status(self):
+ fake_mapper = uuid.uuid4().hex
+ fake_controller = uuid.uuid4().hex
+ fake_path = uuid.uuid4().hex
+ fake_rel = uuid.uuid4().hex
+ self.assertRaises(exception.Error,
+ self.router._add_resource,
+ fake_mapper, fake_controller, fake_path, fake_rel,
+ status=uuid.uuid4().hex)
+
+
class BaseWSGITest(tests.TestCase):
def setUp(self):
self.app = FakeApp()
@@ -185,6 +204,26 @@ class ApplicationTest(BaseWSGITest):
self.assertEqual(401, resp.status_int)
+ def test_improperly_encoded_params(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return context['query_string']
+ # this is high bit set ASCII, copy & pasted from Windows.
+ # aka code page 1252. It is not valid UTF8.
+ req = self._make_request(url='/?name=nonexit%E8nt')
+ self.assertRaises(exception.ValidationError, req.get_response,
+ FakeApp())
+
+ def test_properly_encoded_params(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return context['query_string']
+ # nonexitènt encoded as UTF-8
+ req = self._make_request(url='/?name=nonexit%C3%A8nt')
+ resp = req.get_response(FakeApp())
+ self.assertEqual({'name': u'nonexit\xe8nt'},
+ jsonutils.loads(resp.body))
+
class ExtensionRouterTest(BaseWSGITest):
def test_extensionrouter_local_config(self):
@@ -425,3 +464,43 @@ class ServerTest(tests.TestCase):
1)
self.assertTrue(mock_listen.called)
+
+ def test_client_socket_timeout(self):
+ # mocking server method of eventlet.wsgi to check it is called with
+ # configured 'client_socket_timeout' value.
+ for socket_timeout in range(1, 10):
+ self.config_fixture.config(group='eventlet_server',
+ client_socket_timeout=socket_timeout)
+ server = environment.Server(mock.MagicMock(), host=self.host,
+ port=self.port)
+ with mock.patch.object(eventlet.wsgi, 'server') as mock_server:
+ fake_application = uuid.uuid4().hex
+ fake_socket = uuid.uuid4().hex
+ server._run(fake_application, fake_socket)
+ mock_server.assert_called_once_with(
+ fake_socket,
+ fake_application,
+ debug=mock.ANY,
+ socket_timeout=socket_timeout,
+ log=mock.ANY,
+ keepalive=mock.ANY)
+
+ def test_wsgi_keep_alive(self):
+ # mocking server method of eventlet.wsgi to check it is called with
+ # configured 'wsgi_keep_alive' value.
+ wsgi_keepalive = False
+ self.config_fixture.config(group='eventlet_server',
+ wsgi_keep_alive=wsgi_keepalive)
+
+ server = environment.Server(mock.MagicMock(), host=self.host,
+ port=self.port)
+ with mock.patch.object(eventlet.wsgi, 'server') as mock_server:
+ fake_application = uuid.uuid4().hex
+ fake_socket = uuid.uuid4().hex
+ server._run(fake_application, fake_socket)
+ mock_server.assert_called_once_with(fake_socket,
+ fake_application,
+ debug=mock.ANY,
+ socket_timeout=mock.ANY,
+ log=mock.ANY,
+ keepalive=wsgi_keepalive)
diff --git a/keystone-moon/keystone/tests/unit/tests/test_core.py b/keystone-moon/keystone/tests/unit/tests/test_core.py
index 86c91a8d..2de51c52 100644
--- a/keystone-moon/keystone/tests/unit/tests/test_core.py
+++ b/keystone-moon/keystone/tests/unit/tests/test_core.py
@@ -47,16 +47,7 @@ class TestTestCase(tests.TestCase):
lambda: warnings.warn('test sa warning error', exc.SAWarning),
matchers.raises(exc.SAWarning))
- def test_deprecations(self):
- # If any deprecation warnings occur during testing it's raised as
- # exception.
-
- def use_deprecated():
- # DeprecationWarning: BaseException.message has been deprecated as
- # of Python 2.6
- try:
- raise Exception('something')
- except Exception as e:
- e.message
-
- self.assertThat(use_deprecated, matchers.raises(DeprecationWarning))
+ def test_deprecation_warnings_are_raised_as_exceptions_in_tests(self):
+ self.assertThat(
+ lambda: warnings.warn('this is deprecated', DeprecationWarning),
+ matchers.raises(DeprecationWarning))
diff --git a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
index 23fc0214..4101369c 100644
--- a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
+++ b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
@@ -11,17 +11,21 @@
# under the License.
import datetime
+import hashlib
+import os
import uuid
from oslo_utils import timeutils
from keystone.common import config
+from keystone.common import utils
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit import ksfixtures
from keystone.token import provider
from keystone.token.providers import fernet
from keystone.token.providers.fernet import token_formatters
+from keystone.token.providers.fernet import utils as fernet_utils
CONF = config.CONF
@@ -33,21 +37,21 @@ class TestFernetTokenProvider(tests.TestCase):
self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
self.provider = fernet.Provider()
- def test_get_token_id_raises_not_implemented(self):
- """Test that an exception is raised when calling _get_token_id."""
- token_data = {}
- self.assertRaises(exception.NotImplemented,
- self.provider._get_token_id, token_data)
+ def test_supports_bind_authentication_returns_false(self):
+ self.assertFalse(self.provider._supports_bind_authentication)
- def test_invalid_v3_token_raises_401(self):
+ def test_needs_persistence_returns_false(self):
+ self.assertFalse(self.provider.needs_persistence())
+
+ def test_invalid_v3_token_raises_404(self):
self.assertRaises(
- exception.Unauthorized,
+ exception.TokenNotFound,
self.provider.validate_v3_token,
uuid.uuid4().hex)
- def test_invalid_v2_token_raises_401(self):
+ def test_invalid_v2_token_raises_404(self):
self.assertRaises(
- exception.Unauthorized,
+ exception.TokenNotFound,
self.provider.validate_v2_token,
uuid.uuid4().hex)
@@ -69,7 +73,7 @@ class TestPayloads(tests.TestCase):
def test_time_string_to_int_conversions(self):
payload_cls = token_formatters.BasePayload
- expected_time_str = timeutils.isotime()
+ expected_time_str = utils.isotime(subsecond=True)
time_obj = timeutils.parse_isotime(expected_time_str)
expected_time_int = (
(timeutils.normalize_time(time_obj) -
@@ -86,7 +90,7 @@ class TestPayloads(tests.TestCase):
def test_unscoped_payload(self):
exp_user_id = uuid.uuid4().hex
exp_methods = ['password']
- exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
exp_audit_ids = [provider.random_urlsafe_str()]
payload = token_formatters.UnscopedPayload.assemble(
@@ -104,7 +108,7 @@ class TestPayloads(tests.TestCase):
exp_user_id = uuid.uuid4().hex
exp_methods = ['password']
exp_project_id = uuid.uuid4().hex
- exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
exp_audit_ids = [provider.random_urlsafe_str()]
payload = token_formatters.ProjectScopedPayload.assemble(
@@ -124,7 +128,7 @@ class TestPayloads(tests.TestCase):
exp_user_id = uuid.uuid4().hex
exp_methods = ['password']
exp_domain_id = uuid.uuid4().hex
- exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
exp_audit_ids = [provider.random_urlsafe_str()]
payload = token_formatters.DomainScopedPayload.assemble(
@@ -144,7 +148,7 @@ class TestPayloads(tests.TestCase):
exp_user_id = uuid.uuid4().hex
exp_methods = ['password']
exp_domain_id = CONF.identity.default_domain_id
- exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
exp_audit_ids = [provider.random_urlsafe_str()]
payload = token_formatters.DomainScopedPayload.assemble(
@@ -164,7 +168,128 @@ class TestPayloads(tests.TestCase):
exp_user_id = uuid.uuid4().hex
exp_methods = ['password']
exp_project_id = uuid.uuid4().hex
- exp_expires_at = timeutils.isotime(timeutils.utcnow())
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+ exp_trust_id = uuid.uuid4().hex
+
+ payload = token_formatters.TrustScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids, exp_trust_id)
+
+ (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
+ token_formatters.TrustScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertEqual(exp_trust_id, trust_id)
+
+ def test_unscoped_payload_with_non_uuid_user_id(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['password']
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.UnscopedPayload.assemble(
+ exp_user_id, exp_methods, exp_expires_at, exp_audit_ids)
+
+ (user_id, methods, expires_at, audit_ids) = (
+ token_formatters.UnscopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_project_scoped_payload_with_non_uuid_user_id(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['password']
+ exp_project_id = uuid.uuid4().hex
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.ProjectScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids)
+
+ (user_id, methods, project_id, expires_at, audit_ids) = (
+ token_formatters.ProjectScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_project_scoped_payload_with_non_uuid_project_id(self):
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_project_id = 'someNonUuidProjectId'
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.ProjectScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids)
+
+ (user_id, methods, project_id, expires_at, audit_ids) = (
+ token_formatters.ProjectScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_domain_scoped_payload_with_non_uuid_user_id(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['password']
+ exp_domain_id = uuid.uuid4().hex
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+
+ payload = token_formatters.DomainScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
+ exp_audit_ids)
+
+ (user_id, methods, domain_id, expires_at, audit_ids) = (
+ token_formatters.DomainScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_domain_id, domain_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+
+ def test_trust_scoped_payload_with_non_uuid_user_id(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['password']
+ exp_project_id = uuid.uuid4().hex
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+ exp_trust_id = uuid.uuid4().hex
+
+ payload = token_formatters.TrustScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids, exp_trust_id)
+
+ (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
+ token_formatters.TrustScopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertEqual(exp_trust_id, trust_id)
+
+ def test_trust_scoped_payload_with_non_uuid_project_id(self):
+ exp_user_id = uuid.uuid4().hex
+ exp_methods = ['password']
+ exp_project_id = 'someNonUuidProjectId'
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
exp_audit_ids = [provider.random_urlsafe_str()]
exp_trust_id = uuid.uuid4().hex
@@ -181,3 +306,218 @@ class TestPayloads(tests.TestCase):
self.assertEqual(exp_expires_at, expires_at)
self.assertEqual(exp_audit_ids, audit_ids)
self.assertEqual(exp_trust_id, trust_id)
+
+ def test_federated_payload_with_non_uuid_ids(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['password']
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+ exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
+ 'idp_id': uuid.uuid4().hex,
+ 'protocol_id': uuid.uuid4().hex}
+
+ payload = token_formatters.FederatedUnscopedPayload.assemble(
+ exp_user_id, exp_methods, exp_expires_at, exp_audit_ids,
+ exp_federated_info)
+
+ (user_id, methods, expires_at, audit_ids, federated_info) = (
+ token_formatters.FederatedUnscopedPayload.disassemble(payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertEqual(exp_federated_info['group_ids'][0]['id'],
+ federated_info['group_ids'][0]['id'])
+ self.assertEqual(exp_federated_info['idp_id'],
+ federated_info['idp_id'])
+ self.assertEqual(exp_federated_info['protocol_id'],
+ federated_info['protocol_id'])
+
+ def test_federated_project_scoped_payload(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['token']
+ exp_project_id = uuid.uuid4().hex
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+ exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
+ 'idp_id': uuid.uuid4().hex,
+ 'protocol_id': uuid.uuid4().hex}
+
+ payload = token_formatters.FederatedProjectScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_expires_at,
+ exp_audit_ids, exp_federated_info)
+
+ (user_id, methods, project_id, expires_at, audit_ids,
+ federated_info) = (
+ token_formatters.FederatedProjectScopedPayload.disassemble(
+ payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_project_id, project_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertDictEqual(exp_federated_info, federated_info)
+
+ def test_federated_domain_scoped_payload(self):
+ exp_user_id = 'someNonUuidUserId'
+ exp_methods = ['token']
+ exp_domain_id = uuid.uuid4().hex
+ exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
+ exp_audit_ids = [provider.random_urlsafe_str()]
+ exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
+ 'idp_id': uuid.uuid4().hex,
+ 'protocol_id': uuid.uuid4().hex}
+
+ payload = token_formatters.FederatedDomainScopedPayload.assemble(
+ exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
+ exp_audit_ids, exp_federated_info)
+
+ (user_id, methods, domain_id, expires_at, audit_ids,
+ federated_info) = (
+ token_formatters.FederatedDomainScopedPayload.disassemble(
+ payload))
+
+ self.assertEqual(exp_user_id, user_id)
+ self.assertEqual(exp_methods, methods)
+ self.assertEqual(exp_domain_id, domain_id)
+ self.assertEqual(exp_expires_at, expires_at)
+ self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertDictEqual(exp_federated_info, federated_info)
+
+
+class TestFernetKeyRotation(tests.TestCase):
+ def setUp(self):
+ super(TestFernetKeyRotation, self).setUp()
+
+ # A collection of all previously-seen signatures of the key
+ # repository's contents.
+ self.key_repo_signatures = set()
+
+ @property
+ def keys(self):
+ """Key files converted to numbers."""
+ return sorted(
+ int(x) for x in os.listdir(CONF.fernet_tokens.key_repository))
+
+ @property
+ def key_repository_size(self):
+ """The number of keys in the key repository."""
+ return len(self.keys)
+
+ @property
+ def key_repository_signature(self):
+ """Create a "thumbprint" of the current key repository.
+
+ Because key files are renamed, this produces a hash of the contents of
+ the key files, ignoring their filenames.
+
+ The resulting signature can be used, for example, to ensure that you
+ have a unique set of keys after you perform a key rotation (taking a
+ static set of keys, and simply shuffling them, would fail such a test).
+
+ """
+ # Load the keys into a list.
+ keys = fernet_utils.load_keys()
+
+ # Sort the list of keys by the keys themselves (they were previously
+ # sorted by filename).
+ keys.sort()
+
+ # Create the thumbprint using all keys in the repository.
+ signature = hashlib.sha1()
+ for key in keys:
+ signature.update(key)
+ return signature.hexdigest()
+
+ def assertRepositoryState(self, expected_size):
+ """Validate the state of the key repository."""
+ self.assertEqual(expected_size, self.key_repository_size)
+ self.assertUniqueRepositoryState()
+
+ def assertUniqueRepositoryState(self):
+        """Ensure that the current key repo state has not been seen before."""
+ # This is assigned to a variable because it takes some work to
+ # calculate.
+ signature = self.key_repository_signature
+
+ # Ensure the signature is not in the set of previously seen signatures.
+ self.assertNotIn(signature, self.key_repo_signatures)
+
+ # Add the signature to the set of repository signatures to validate
+ # that we don't see it again later.
+ self.key_repo_signatures.add(signature)
+
+ def test_rotation(self):
+ # Initializing a key repository results in this many keys. We don't
+ # support max_active_keys being set any lower.
+ min_active_keys = 2
+
+ # Simulate every rotation strategy up to "rotating once a week while
+ # maintaining a year's worth of keys."
+ for max_active_keys in range(min_active_keys, 52 + 1):
+ self.config_fixture.config(group='fernet_tokens',
+ max_active_keys=max_active_keys)
+
+ # Ensure that resetting the key repository always results in 2
+ # active keys.
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
+ # Validate the initial repository state.
+ self.assertRepositoryState(expected_size=min_active_keys)
+
+ # The repository should be initialized with a staged key (0) and a
+ # primary key (1). The next key is just auto-incremented.
+ exp_keys = [0, 1]
+ next_key_number = exp_keys[-1] + 1 # keep track of next key
+ self.assertEqual(exp_keys, self.keys)
+
+ # Rotate the keys just enough times to fully populate the key
+ # repository.
+ for rotation in range(max_active_keys - min_active_keys):
+ fernet_utils.rotate_keys()
+ self.assertRepositoryState(expected_size=rotation + 3)
+
+ exp_keys.append(next_key_number)
+ next_key_number += 1
+ self.assertEqual(exp_keys, self.keys)
+
+ # We should have a fully populated key repository now.
+ self.assertEqual(max_active_keys, self.key_repository_size)
+
+ # Rotate an additional number of times to ensure that we maintain
+ # the desired number of active keys.
+ for rotation in range(10):
+ fernet_utils.rotate_keys()
+ self.assertRepositoryState(expected_size=max_active_keys)
+
+ exp_keys.pop(1)
+ exp_keys.append(next_key_number)
+ next_key_number += 1
+ self.assertEqual(exp_keys, self.keys)
+
+ def test_non_numeric_files(self):
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+ evil_file = os.path.join(CONF.fernet_tokens.key_repository, '99.bak')
+ with open(evil_file, 'w'):
+ pass
+ fernet_utils.rotate_keys()
+ self.assertTrue(os.path.isfile(evil_file))
+ keys = 0
+ for x in os.listdir(CONF.fernet_tokens.key_repository):
+ if x == '99.bak':
+ continue
+ keys += 1
+ self.assertEqual(3, keys)
+
+
+class TestLoadKeys(tests.TestCase):
+ def test_non_numeric_files(self):
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+ evil_file = os.path.join(CONF.fernet_tokens.key_repository, '~1')
+ with open(evil_file, 'w'):
+ pass
+ keys = fernet_utils.load_keys()
+ self.assertEqual(2, len(keys))
+ self.assertTrue(len(keys[0]))
diff --git a/keystone-moon/keystone/tests/unit/token/test_pki_provider.py b/keystone-moon/keystone/tests/unit/token/test_pki_provider.py
new file mode 100644
index 00000000..dad31266
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_pki_provider.py
@@ -0,0 +1,26 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests import unit as tests
+from keystone.token.providers import pki
+
+
+class TestPkiTokenProvider(tests.TestCase):
+ def setUp(self):
+ super(TestPkiTokenProvider, self).setUp()
+ self.provider = pki.Provider()
+
+ def test_supports_bind_authentication_returns_true(self):
+ self.assertTrue(self.provider._supports_bind_authentication)
+
+ def test_need_persistence_return_true(self):
+ self.assertIs(True, self.provider.needs_persistence())
diff --git a/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py b/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py
new file mode 100644
index 00000000..4a492bc1
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py
@@ -0,0 +1,26 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests import unit as tests
+from keystone.token.providers import pkiz
+
+
+class TestPkizTokenProvider(tests.TestCase):
+ def setUp(self):
+ super(TestPkizTokenProvider, self).setUp()
+ self.provider = pkiz.Provider()
+
+ def test_supports_bind_authentication_returns_true(self):
+ self.assertTrue(self.provider._supports_bind_authentication)
+
+ def test_need_persistence_return_true(self):
+ self.assertIs(True, self.provider.needs_persistence())
diff --git a/keystone-moon/keystone/tests/unit/token/test_provider.py b/keystone-moon/keystone/tests/unit/token/test_provider.py
index e5910690..be831484 100644
--- a/keystone-moon/keystone/tests/unit/token/test_provider.py
+++ b/keystone-moon/keystone/tests/unit/token/test_provider.py
@@ -10,7 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
-import urllib
+import six
+from six.moves import urllib
from keystone.tests import unit
from keystone.token import provider
@@ -19,11 +20,11 @@ from keystone.token import provider
class TestRandomStrings(unit.BaseTestCase):
def test_strings_are_url_safe(self):
s = provider.random_urlsafe_str()
- self.assertEqual(s, urllib.quote_plus(s))
+ self.assertEqual(s, urllib.parse.quote_plus(s))
def test_strings_can_be_converted_to_bytes(self):
s = provider.random_urlsafe_str()
- self.assertTrue(isinstance(s, basestring))
+ self.assertTrue(isinstance(s, six.string_types))
b = provider.random_urlsafe_str_to_bytes(s)
self.assertTrue(isinstance(b, bytes))
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_model.py b/keystone-moon/keystone/tests/unit/token/test_token_model.py
index b2474289..3959d901 100644
--- a/keystone-moon/keystone/tests/unit/token/test_token_model.py
+++ b/keystone-moon/keystone/tests/unit/token/test_token_model.py
@@ -15,7 +15,9 @@ import uuid
from oslo_config import cfg
from oslo_utils import timeutils
+from six.moves import range
+from keystone.contrib.federation import constants as federation_constants
from keystone import exception
from keystone.models import token_model
from keystone.tests.unit import core
@@ -127,7 +129,7 @@ class TestKeystoneTokenModel(core.TestCase):
self.assertIsNone(token_data.federation_protocol_id)
self.assertIsNone(token_data.federation_idp_id)
- token_data['user'][token_model.federation.FEDERATION] = federation_data
+ token_data['user'][federation_constants.FEDERATION] = federation_data
self.assertTrue(token_data.is_federated_user)
self.assertEqual([x['id'] for x in federation_data['groups']],
@@ -149,7 +151,7 @@ class TestKeystoneTokenModel(core.TestCase):
self.assertIsNone(token_data.federation_protocol_id)
self.assertIsNone(token_data.federation_idp_id)
- token_data['user'][token_model.federation.FEDERATION] = federation_data
+ token_data['user'][federation_constants.FEDERATION] = federation_data
# Federated users should not exist in V2, the data should remain empty
self.assertFalse(token_data.is_federated_user)
diff --git a/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py b/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py
new file mode 100644
index 00000000..b49427f0
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py
@@ -0,0 +1,26 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests import unit as tests
+from keystone.token.providers import uuid
+
+
+class TestUuidTokenProvider(tests.TestCase):
+ def setUp(self):
+ super(TestUuidTokenProvider, self).setUp()
+ self.provider = uuid.Provider()
+
+ def test_supports_bind_authentication_returns_true(self):
+ self.assertTrue(self.provider._supports_bind_authentication)
+
+ def test_need_persistence_return_true(self):
+ self.assertIs(True, self.provider.needs_persistence())