author    asteroide <thomas.duval@orange.com>  2015-09-01 16:03:26 +0200
committer asteroide <thomas.duval@orange.com>  2015-09-01 16:04:53 +0200
commit    92fd2dbfb672d7b2b1cdfd5dd5cf89f7716b3e12 (patch)
tree      7ba22297042019e7363fa1d4ad26d1c32c5908c6 /keystone-moon/keystone/token/providers
parent    26e753254f3e43399cc76e62892908b7742415e8 (diff)
Update Keystone code from the official GitHub repository, master branch, as of 09/01/2015.
Change-Id: I0ff6099e6e2580f87f502002a998bbfe12673498
Diffstat (limited to 'keystone-moon/keystone/token/providers')
-rw-r--r--  keystone-moon/keystone/token/providers/common.py                  | 157
-rw-r--r--  keystone-moon/keystone/token/providers/fernet/core.py             | 229
-rw-r--r--  keystone-moon/keystone/token/providers/fernet/token_formatters.py | 188
-rw-r--r--  keystone-moon/keystone/token/providers/fernet/utils.py            |  58
-rw-r--r--  keystone-moon/keystone/token/providers/pki.py                     |   8
-rw-r--r--  keystone-moon/keystone/token/providers/pkiz.py                    |   8
-rw-r--r--  keystone-moon/keystone/token/providers/uuid.py                    |   8
7 files changed, 435 insertions(+), 221 deletions(-)
diff --git a/keystone-moon/keystone/token/providers/common.py b/keystone-moon/keystone/token/providers/common.py
index 717e1495..b71458cd 100644
--- a/keystone-moon/keystone/token/providers/common.py
+++ b/keystone-moon/keystone/token/providers/common.py
@@ -14,17 +14,17 @@
from oslo_config import cfg
from oslo_log import log
+from oslo_log import versionutils
from oslo_serialization import jsonutils
-from oslo_utils import timeutils
import six
from six.moves.urllib import parse
from keystone.common import controller as common_controller
from keystone.common import dependency
-from keystone.contrib import federation
+from keystone.common import utils
+from keystone.contrib.federation import constants as federation_constants
from keystone import exception
from keystone.i18n import _, _LE
-from keystone.openstack.common import versionutils
from keystone import token
from keystone.token import provider
@@ -37,18 +37,33 @@ CONF = cfg.CONF
class V2TokenDataHelper(object):
"""Creates V2 token data."""
- def v3_to_v2_token(self, token_id, v3_token_data):
+ def v3_to_v2_token(self, v3_token_data):
token_data = {}
# Build v2 token
v3_token = v3_token_data['token']
token = {}
- token['id'] = token_id
token['expires'] = v3_token.get('expires_at')
token['issued_at'] = v3_token.get('issued_at')
token['audit_ids'] = v3_token.get('audit_ids')
+ # Bail immediately if this is a domain-scoped token, which is not
+ # supported by the v2 API at all.
+ if 'domain' in v3_token:
+ raise exception.Unauthorized(_(
+ 'Domains are not supported by the v2 API. Please use the v3 '
+ 'API instead.'))
+
+ # Bail if this is a project-scoped token outside the default domain,
+ # which may result in a namespace collision with a project inside the
+ # default domain.
if 'project' in v3_token:
+ if (v3_token['project']['domain']['id'] !=
+ CONF.identity.default_domain_id):
+ raise exception.Unauthorized(_(
+ 'Project not found in the default domain (please use the '
+ 'v3 API instead): %s') % v3_token['project']['id'])
+
# v3 token_data does not contain all tenant attributes
tenant = self.resource_api.get_project(
v3_token['project']['id'])
@@ -58,14 +73,32 @@ class V2TokenDataHelper(object):
# Build v2 user
v3_user = v3_token['user']
+
+ # Bail if this is a token outside the default domain,
+ # which may result in a namespace collision with a project inside the
+ # default domain.
+ if ('domain' in v3_user and v3_user['domain']['id'] !=
+ CONF.identity.default_domain_id):
+ raise exception.Unauthorized(_(
+ 'User not found in the default domain (please use the v3 API '
+ 'instead): %s') % v3_user['id'])
+
user = common_controller.V2Controller.v3_to_v2_user(v3_user)
+ # Maintain Trust Data
+ if 'OS-TRUST:trust' in v3_token:
+ v3_trust_data = v3_token['OS-TRUST:trust']
+ token_data['trust'] = {
+ 'trustee_user_id': v3_trust_data['trustee_user']['id'],
+ 'id': v3_trust_data['id'],
+ 'trustor_user_id': v3_trust_data['trustor_user']['id'],
+ 'impersonation': v3_trust_data['impersonation']
+ }
+
# Set user roles
user['roles'] = []
role_ids = []
for role in v3_token.get('roles', []):
- # Filter role id since it's not included in v2 token response
- role_ids.append(role.pop('id'))
user['roles'].append(role)
user['roles_links'] = []
@@ -99,7 +132,7 @@ class V2TokenDataHelper(object):
expires = token_ref.get('expires', provider.default_expire_time())
if expires is not None:
if not isinstance(expires, six.text_type):
- expires = timeutils.isotime(expires)
+ expires = utils.isotime(expires)
token_data = token_ref.get('token_data')
if token_data:
@@ -112,7 +145,7 @@ class V2TokenDataHelper(object):
o = {'access': {'token': {'id': token_ref['id'],
'expires': expires,
- 'issued_at': timeutils.strtime(),
+ 'issued_at': utils.strtime(),
'audit_ids': audit_info
},
'user': {'id': user_ref['id'],
@@ -181,8 +214,8 @@ class V2TokenDataHelper(object):
return []
services = {}
- for region, region_ref in six.iteritems(catalog_ref):
- for service, service_ref in six.iteritems(region_ref):
+ for region, region_ref in catalog_ref.items():
+ for service, service_ref in region_ref.items():
new_service_ref = services.get(service, {})
new_service_ref['name'] = service_ref.pop('name')
new_service_ref['type'] = service
@@ -195,7 +228,7 @@ class V2TokenDataHelper(object):
new_service_ref['endpoints'] = endpoints_ref
services[service] = new_service_ref
- return services.values()
+ return list(services.values())
@dependency.requires('assignment_api', 'catalog_api', 'federation_api',
@@ -239,10 +272,26 @@ class V3TokenDataHelper(object):
user_id, project_id)
return [self.role_api.get_role(role_id) for role_id in roles]
- def _populate_roles_for_groups(self, group_ids,
- project_id=None, domain_id=None,
- user_id=None):
- def _check_roles(roles, user_id, project_id, domain_id):
+ def populate_roles_for_groups(self, token_data, group_ids,
+ project_id=None, domain_id=None,
+ user_id=None):
+        """Populate roles based on the provided groups and project/domain
+
+ Used for ephemeral users with dynamically assigned groups.
+ This method does not return anything, yet it modifies token_data in
+ place.
+
+ :param token_data: a dictionary used for building token response
+ :group_ids: list of group IDs a user is a member of
+ :project_id: project ID to scope to
+ :domain_id: domain ID to scope to
+ :user_id: user ID
+
+        :raises: exception.Unauthorized - when no roles were found for the
+            (group_ids, project_id) or (group_ids, domain_id) pair.
+
+ """
+ def check_roles(roles, user_id, project_id, domain_id):
# User was granted roles so simply exit this function.
if roles:
return
@@ -264,8 +313,8 @@ class V3TokenDataHelper(object):
roles = self.assignment_api.get_roles_for_groups(group_ids,
project_id,
domain_id)
- _check_roles(roles, user_id, project_id, domain_id)
- return roles
+ check_roles(roles, user_id, project_id, domain_id)
+ token_data['roles'] = roles
def _populate_user(self, token_data, user_id, trust):
if 'user' in token_data:
@@ -393,10 +442,10 @@ class V3TokenDataHelper(object):
if not expires:
expires = provider.default_expire_time()
if not isinstance(expires, six.string_types):
- expires = timeutils.isotime(expires, subsecond=True)
+ expires = utils.isotime(expires, subsecond=True)
token_data['expires_at'] = expires
token_data['issued_at'] = (issued_at or
- timeutils.isotime(subsecond=True))
+ utils.isotime(subsecond=True))
def _populate_audit_info(self, token_data, audit_info=None):
if audit_info is None or isinstance(audit_info, six.string_types):
@@ -420,7 +469,7 @@ class V3TokenDataHelper(object):
versionutils.deprecated(
what='passing token data with "extras"',
as_of=versionutils.deprecated.KILO,
- in_favor_of='well-defined APIs')
+ in_favor_of='well-defined APIs')(lambda: None)()
token_data = {'methods': method_names,
'extras': extras}
@@ -490,13 +539,21 @@ class BaseProvider(provider.Provider):
return token_id, token_data
def _is_mapped_token(self, auth_context):
- return (federation.IDENTITY_PROVIDER in auth_context and
- federation.PROTOCOL in auth_context)
+ return (federation_constants.IDENTITY_PROVIDER in auth_context and
+ federation_constants.PROTOCOL in auth_context)
def issue_v3_token(self, user_id, method_names, expires_at=None,
project_id=None, domain_id=None, auth_context=None,
trust=None, metadata_ref=None, include_catalog=True,
parent_audit_id=None):
+ if auth_context and auth_context.get('bind'):
+ # NOTE(lbragstad): Check if the token provider being used actually
+ # supports bind authentication methods before proceeding.
+ if not self._supports_bind_authentication:
+ raise exception.NotImplemented(_(
+ 'The configured token provider does not support bind '
+ 'authentication.'))
+
# for V2, trust is stashed in metadata_ref
if (CONF.trust.enabled and not trust and metadata_ref and
'trust_id' in metadata_ref):
@@ -530,38 +587,30 @@ class BaseProvider(provider.Provider):
return token_id, token_data
def _handle_mapped_tokens(self, auth_context, project_id, domain_id):
- def get_federated_domain():
- return (CONF.federation.federated_domain_name or
- federation.FEDERATED_DOMAIN_KEYWORD)
-
- federated_domain = get_federated_domain()
user_id = auth_context['user_id']
group_ids = auth_context['group_ids']
- idp = auth_context[federation.IDENTITY_PROVIDER]
- protocol = auth_context[federation.PROTOCOL]
+ idp = auth_context[federation_constants.IDENTITY_PROVIDER]
+ protocol = auth_context[federation_constants.PROTOCOL]
token_data = {
'user': {
'id': user_id,
'name': parse.unquote(user_id),
- federation.FEDERATION: {
+ federation_constants.FEDERATION: {
+ 'groups': [{'id': x} for x in group_ids],
'identity_provider': {'id': idp},
'protocol': {'id': protocol}
},
'domain': {
- 'id': federated_domain,
- 'name': federated_domain
+ 'id': CONF.federation.federated_domain_name,
+ 'name': CONF.federation.federated_domain_name
}
}
}
if project_id or domain_id:
- roles = self.v3_token_data_helper._populate_roles_for_groups(
- group_ids, project_id, domain_id, user_id)
- token_data.update({'roles': roles})
- else:
- token_data['user'][federation.FEDERATION].update({
- 'groups': [{'id': x} for x in group_ids]
- })
+ self.v3_token_data_helper.populate_roles_for_groups(
+ token_data, group_ids, project_id, domain_id, user_id)
+
return token_data
def _verify_token_ref(self, token_ref):
@@ -637,30 +686,10 @@ class BaseProvider(provider.Provider):
# management layer is now pluggable, one can always provide
# their own implementation to suit their needs.
token_data = token_ref.get('token_data')
- if (not token_data or
- self.get_token_version(token_data) !=
- token.provider.V2):
- # token is created by old v2 logic
- metadata_ref = token_ref['metadata']
- roles_ref = []
- for role_id in metadata_ref.get('roles', []):
- roles_ref.append(self.role_api.get_role(role_id))
-
- # Get a service catalog if possible
- # This is needed for on-behalf-of requests
- catalog_ref = None
- if token_ref.get('tenant'):
- catalog_ref = self.catalog_api.get_catalog(
- token_ref['user']['id'],
- token_ref['tenant']['id'])
-
- trust_ref = None
- if CONF.trust.enabled and 'trust_id' in metadata_ref:
- trust_ref = self.trust_api.get_trust(
- metadata_ref['trust_id'])
-
- token_data = self.v2_token_data_helper.format_token(
- token_ref, roles_ref, catalog_ref, trust_ref)
+ if (self.get_token_version(token_data) != token.provider.V2):
+ # Validate the V3 token as V2
+ token_data = self.v2_token_data_helper.v3_to_v2_token(
+ token_data)
trust_id = token_data['access'].get('trust', {}).get('id')
if trust_id:
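
The main behavioral change in common.py above is that V2TokenDataHelper.v3_to_v2_token now derives the v2 token entirely from v3 token data and rejects scopes the v2 API cannot express. Below is a minimal standalone sketch of that filtering logic; DEFAULT_DOMAIN_ID and the Unauthorized class are stand-ins for CONF.identity.default_domain_id and keystone.exception.Unauthorized, not the real Keystone objects.

    # Standalone sketch of the domain / default-domain checks performed by the
    # new v3_to_v2_token; names below are illustrative stand-ins.
    DEFAULT_DOMAIN_ID = 'default'


    class Unauthorized(Exception):
        pass


    def check_v2_compatible(v3_token):
        """Raise Unauthorized if a v3 token cannot be expressed as a v2 token."""
        if 'domain' in v3_token:
            # Domain-scoped tokens have no v2 representation at all.
            raise Unauthorized('Domains are not supported by the v2 API.')

        project = v3_token.get('project')
        if project and project['domain']['id'] != DEFAULT_DOMAIN_ID:
            # A project outside the default domain could collide with a project
            # of the same name inside it, so the v2 API refuses it.
            raise Unauthorized('Project not found in the default domain: %s'
                               % project['id'])

        user = v3_token['user']
        if 'domain' in user and user['domain']['id'] != DEFAULT_DOMAIN_ID:
            raise Unauthorized('User not found in the default domain: %s'
                               % user['id'])


    # A project-scoped v3 token inside the default domain passes the checks.
    check_v2_compatible({
        'user': {'id': 'u1', 'domain': {'id': 'default'}},
        'project': {'id': 'p1', 'domain': {'id': 'default'}},
    })
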
diff --git a/keystone-moon/keystone/token/providers/fernet/core.py b/keystone-moon/keystone/token/providers/fernet/core.py
index b1da263b..1bbacb03 100644
--- a/keystone-moon/keystone/token/providers/fernet/core.py
+++ b/keystone-moon/keystone/token/providers/fernet/core.py
@@ -14,7 +14,8 @@ from oslo_config import cfg
from oslo_log import log
from keystone.common import dependency
-from keystone.contrib import federation
+from keystone.common import utils as ks_utils
+from keystone.contrib.federation import constants as federation_constants
from keystone import exception
from keystone.i18n import _
from keystone.token import provider
@@ -59,6 +60,9 @@ class Provider(common.BaseProvider):
if token_ref.get('tenant'):
project_id = token_ref['tenant']['id']
+ # maintain expiration time across rescopes
+ expires = token_ref.get('expires')
+
parent_audit_id = token_ref.get('parent_audit_id')
# If parent_audit_id is defined then a token authentication was made
if parent_audit_id:
@@ -80,136 +84,132 @@ class Provider(common.BaseProvider):
project_id=project_id,
token=token_ref,
include_catalog=False,
- audit_info=audit_ids)
+ audit_info=audit_ids,
+ expires=expires)
expires_at = v3_token_data['token']['expires_at']
token_id = self.token_formatter.create_token(user_id, expires_at,
audit_ids,
methods=method_names,
project_id=project_id)
+ self._build_issued_at_info(token_id, v3_token_data)
# Convert v3 to v2 token data and build v2 catalog
- token_data = self.v2_token_data_helper.v3_to_v2_token(token_id,
- v3_token_data)
+ token_data = self.v2_token_data_helper.v3_to_v2_token(v3_token_data)
+ token_data['access']['token']['id'] = token_id
+
+ return token_id, token_data
+ def issue_v3_token(self, *args, **kwargs):
+ token_id, token_data = super(Provider, self).issue_v3_token(
+ *args, **kwargs)
+ self._build_issued_at_info(token_id, token_data)
return token_id, token_data
+ def _build_issued_at_info(self, token_id, token_data):
+ # NOTE(roxanaghe, lbragstad): We must use the creation time that
+        # Fernet builds into its token. The Fernet spec details that the
+ # token creation time is built into the token, outside of the payload
+ # provided by Keystone. This is the reason why we don't pass the
+ # issued_at time in the payload. This also means that we shouldn't
+ # return a token reference with a creation time that we created
+ # when Fernet uses a different creation time. We should use the
+ # creation time provided by Fernet because it's the creation time
+ # that we have to rely on when we validate the token.
+ fernet_creation_datetime_obj = self.token_formatter.creation_time(
+ token_id)
+ token_data['token']['issued_at'] = ks_utils.isotime(
+ at=fernet_creation_datetime_obj, subsecond=True)
+
def _build_federated_info(self, token_data):
"""Extract everything needed for federated tokens.
- This dictionary is passed to the FederatedPayload token formatter,
- which unpacks the values and builds the Fernet token.
+ This dictionary is passed to federated token formatters, which unpack
+ the values and build federated Fernet tokens.
"""
- group_ids = token_data.get('user', {}).get(
- federation.FEDERATION, {}).get('groups')
- idp_id = token_data.get('user', {}).get(
- federation.FEDERATION, {}).get('identity_provider', {}).get('id')
- protocol_id = token_data.get('user', {}).get(
- federation.FEDERATION, {}).get('protocol', {}).get('id')
- if not group_ids:
- group_ids = list()
- federated_dict = dict(group_ids=group_ids, idp_id=idp_id,
- protocol_id=protocol_id)
- return federated_dict
+ idp_id = token_data['token'].get('user', {}).get(
+ federation_constants.FEDERATION, {}).get(
+ 'identity_provider', {}).get('id')
+ protocol_id = token_data['token'].get('user', {}).get(
+ federation_constants.FEDERATION, {}).get('protocol', {}).get('id')
+ # If we don't have an identity provider ID and a protocol ID, it's safe
+ # to assume we aren't dealing with a federated token.
+ if not (idp_id and protocol_id):
+ return None
+
+ group_ids = token_data['token'].get('user', {}).get(
+ federation_constants.FEDERATION, {}).get('groups')
+
+ return {'group_ids': group_ids,
+ 'idp_id': idp_id,
+ 'protocol_id': protocol_id}
def _rebuild_federated_info(self, federated_dict, user_id):
"""Format federated information into the token reference.
- The federated_dict is passed back from the FederatedPayload token
- formatter. The responsibility of this method is to format the
- information passed back from the token formatter into the token
- reference before constructing the token data from the
- V3TokenDataHelper.
+ The federated_dict is passed back from the federated token formatters.
+ The responsibility of this method is to format the information passed
+ back from the token formatter into the token reference before
+ constructing the token data from the V3TokenDataHelper.
"""
g_ids = federated_dict['group_ids']
idp_id = federated_dict['idp_id']
protocol_id = federated_dict['protocol_id']
- federated_info = dict(groups=g_ids,
- identity_provider=dict(id=idp_id),
- protocol=dict(id=protocol_id))
- token_dict = {'user': {federation.FEDERATION: federated_info}}
- token_dict['user']['id'] = user_id
- token_dict['user']['name'] = user_id
+
+ federated_info = {
+ 'groups': g_ids,
+ 'identity_provider': {'id': idp_id},
+ 'protocol': {'id': protocol_id}
+ }
+
+ token_dict = {
+ 'user': {
+ federation_constants.FEDERATION: federated_info,
+ 'id': user_id,
+ 'name': user_id
+ }
+ }
+
return token_dict
- def issue_v3_token(self, user_id, method_names, expires_at=None,
- project_id=None, domain_id=None, auth_context=None,
- trust=None, metadata_ref=None, include_catalog=True,
- parent_audit_id=None):
- """Issue a V3 formatted token.
-
- Here is where we need to detect what is given to us, and what kind of
- token the user is expecting. Depending on the outcome of that, we can
- pass all the information to be packed to the proper token format
- handler.
-
- :param user_id: ID of the user
- :param method_names: method of authentication
- :param expires_at: token expiration time
- :param project_id: ID of the project being scoped to
- :param domain_id: ID of the domain being scoped to
- :param auth_context: authentication context
- :param trust: ID of the trust
- :param metadata_ref: metadata reference
- :param include_catalog: return the catalog in the response if True,
- otherwise don't return the catalog
- :param parent_audit_id: ID of the patent audit entity
- :returns: tuple containing the id of the token and the token data
+ def _rebuild_federated_token_roles(self, token_dict, federated_dict,
+ user_id, project_id, domain_id):
+ """Populate roles based on (groups, project/domain) pair.
- """
- # TODO(lbragstad): Currently, Fernet tokens don't support bind in the
- # token format. Raise a 501 if we're dealing with bind.
- if auth_context.get('bind'):
- raise exception.NotImplemented()
+ We must populate roles from (groups, project/domain) as ephemeral users
+ don't exist in the backend. Upon success, a ``roles`` key will be added
+ to ``token_dict``.
- token_ref = None
- # NOTE(lbragstad): This determines if we are dealing with a federated
- # token or not. The groups for the user will be in the returned token
- # reference.
- federated_dict = None
- if auth_context and self._is_mapped_token(auth_context):
- token_ref = self._handle_mapped_tokens(
- auth_context, project_id, domain_id)
- federated_dict = self._build_federated_info(token_ref)
-
- token_data = self.v3_token_data_helper.get_token_data(
- user_id,
- method_names,
- auth_context.get('extras') if auth_context else None,
- domain_id=domain_id,
- project_id=project_id,
- expires=expires_at,
- trust=trust,
- bind=auth_context.get('bind') if auth_context else None,
- token=token_ref,
- include_catalog=include_catalog,
- audit_info=parent_audit_id)
+ :param token_dict: dictionary with data used for building token
+ :param federated_dict: federated information such as identity provider
+ protocol and set of group IDs
+ :param user_id: user ID
+ :param project_id: project ID the token is being scoped to
+ :param domain_id: domain ID the token is being scoped to
- token = self.token_formatter.create_token(
- user_id,
- token_data['token']['expires_at'],
- token_data['token']['audit_ids'],
- methods=method_names,
- domain_id=domain_id,
- project_id=project_id,
- trust_id=token_data['token'].get('OS-TRUST:trust', {}).get('id'),
- federated_info=federated_dict)
- return token, token_data
+ """
+ group_ids = [x['id'] for x in federated_dict['group_ids']]
+ self.v3_token_data_helper.populate_roles_for_groups(
+ token_dict, group_ids, project_id, domain_id, user_id)
def validate_v2_token(self, token_ref):
"""Validate a V2 formatted token.
:param token_ref: reference describing the token to validate
:returns: the token data
+ :raises keystone.exception.TokenNotFound: if token format is invalid
:raises keystone.exception.Unauthorized: if v3 token is used
"""
- (user_id, methods,
- audit_ids, domain_id,
- project_id, trust_id,
- federated_info, created_at,
- expires_at) = self.token_formatter.validate_token(token_ref)
+ try:
+ (user_id, methods,
+ audit_ids, domain_id,
+ project_id, trust_id,
+ federated_info, created_at,
+ expires_at) = self.token_formatter.validate_token(token_ref)
+ except exception.ValidationError as e:
+ raise exception.TokenNotFound(e)
if trust_id or domain_id or federated_info:
msg = _('This is not a v2.0 Fernet token. Use v3 for trust, '
@@ -225,26 +225,36 @@ class Provider(common.BaseProvider):
token=token_ref,
include_catalog=False,
audit_info=audit_ids)
- return self.v2_token_data_helper.v3_to_v2_token(token_ref,
- v3_token_data)
+ token_data = self.v2_token_data_helper.v3_to_v2_token(v3_token_data)
+ token_data['access']['token']['id'] = token_ref
+ return token_data
def validate_v3_token(self, token):
"""Validate a V3 formatted token.
:param token: a string describing the token to validate
:returns: the token data
- :raises keystone.exception.Unauthorized: if token format version isn't
+ :raises keystone.exception.TokenNotFound: if token format version isn't
supported
"""
- (user_id, methods, audit_ids, domain_id, project_id, trust_id,
- federated_info, created_at, expires_at) = (
- self.token_formatter.validate_token(token))
+ try:
+ (user_id, methods, audit_ids, domain_id, project_id, trust_id,
+ federated_info, created_at, expires_at) = (
+ self.token_formatter.validate_token(token))
+ except exception.ValidationError as e:
+ raise exception.TokenNotFound(e)
token_dict = None
+ trust_ref = None
if federated_info:
token_dict = self._rebuild_federated_info(federated_info, user_id)
- trust_ref = self.trust_api.get_trust(trust_id)
+ if project_id or domain_id:
+ self._rebuild_federated_token_roles(token_dict, federated_info,
+ user_id, project_id,
+ domain_id)
+ if trust_id:
+ trust_ref = self.trust_api.get_trust(trust_id)
return self.v3_token_data_helper.get_token_data(
user_id,
@@ -264,4 +274,21 @@ class Provider(common.BaseProvider):
:type token_data: dict
:raises keystone.exception.NotImplemented: when called
"""
- raise exception.NotImplemented()
+ return self.token_formatter.create_token(
+ token_data['token']['user']['id'],
+ token_data['token']['expires_at'],
+ token_data['token']['audit_ids'],
+ methods=token_data['token'].get('methods'),
+ domain_id=token_data['token'].get('domain', {}).get('id'),
+ project_id=token_data['token'].get('project', {}).get('id'),
+ trust_id=token_data['token'].get('OS-TRUST:trust', {}).get('id'),
+ federated_info=self._build_federated_info(token_data)
+ )
+
+ @property
+ def _supports_bind_authentication(self):
+ """Return if the token provider supports bind authentication methods.
+
+ :returns: False
+ """
+ return False
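
The new _build_issued_at_info hook above works because the Fernet format embeds the creation timestamp in the token itself rather than in the payload Keystone supplies. A hedged sketch of reading that timestamp back out with the cryptography library follows; the key and payload are invented for illustration, and Keystone wraps the equivalent logic in TokenFormatter.creation_time.

    import base64
    import datetime
    import struct

    from cryptography.fernet import Fernet

    # Invented key and payload, only to produce a token whose embedded
    # timestamp can be read back.
    key = Fernet.generate_key()
    token = Fernet(key).encrypt(b'example payload')

    # A Fernet token is urlsafe base64; byte 0 is the format version and
    # bytes 1-8 hold the big-endian creation timestamp.
    raw = base64.urlsafe_b64decode(token)
    timestamp = struct.unpack('>Q', raw[1:9])[0]
    issued_at = datetime.datetime.utcfromtimestamp(timestamp)
    print(issued_at.isoformat())
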
diff --git a/keystone-moon/keystone/token/providers/fernet/token_formatters.py b/keystone-moon/keystone/token/providers/fernet/token_formatters.py
index 50960923..d1dbb08c 100644
--- a/keystone-moon/keystone/token/providers/fernet/token_formatters.py
+++ b/keystone-moon/keystone/token/providers/fernet/token_formatters.py
@@ -21,11 +21,12 @@ from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
import six
-from six.moves import urllib
+from six.moves import map, urllib
from keystone.auth import plugins as auth_plugins
+from keystone.common import utils as ks_utils
from keystone import exception
-from keystone.i18n import _
+from keystone.i18n import _, _LI
from keystone.token import provider
from keystone.token.providers.fernet import utils
@@ -60,7 +61,7 @@ class TokenFormatter(object):
if not keys:
raise exception.KeysNotFound()
- fernet_instances = [fernet.Fernet(key) for key in utils.load_keys()]
+ fernet_instances = [fernet.Fernet(key) for key in keys]
return fernet.MultiFernet(fernet_instances)
def pack(self, payload):
@@ -74,8 +75,9 @@ class TokenFormatter(object):
try:
return self.crypto.decrypt(token)
- except fernet.InvalidToken as e:
- raise exception.Unauthorized(six.text_type(e))
+ except fernet.InvalidToken:
+ raise exception.ValidationError(
+ _('This is not a recognized Fernet token'))
@classmethod
def creation_time(cls, fernet_token):
@@ -115,9 +117,27 @@ class TokenFormatter(object):
expires_at,
audit_ids,
trust_id)
+ elif project_id and federated_info:
+ version = FederatedProjectScopedPayload.version
+ payload = FederatedProjectScopedPayload.assemble(
+ user_id,
+ methods,
+ project_id,
+ expires_at,
+ audit_ids,
+ federated_info)
+ elif domain_id and federated_info:
+ version = FederatedDomainScopedPayload.version
+ payload = FederatedDomainScopedPayload.assemble(
+ user_id,
+ methods,
+ domain_id,
+ expires_at,
+ audit_ids,
+ federated_info)
elif federated_info:
- version = FederatedPayload.version
- payload = FederatedPayload.assemble(
+ version = FederatedUnscopedPayload.version
+ payload = FederatedUnscopedPayload.assemble(
user_id,
methods,
expires_at,
@@ -151,6 +171,17 @@ class TokenFormatter(object):
serialized_payload = msgpack.packb(versioned_payload)
token = self.pack(serialized_payload)
+ # NOTE(lbragstad): We should warn against Fernet tokens that are over
+ # 255 characters in length. This is mostly due to persisting the tokens
+ # in a backend store of some kind that might have a limit of 255
+ # characters. Even though Keystone isn't storing a Fernet token
+ # anywhere, we can't say it isn't being stored somewhere else with
+ # those kind of backend constraints.
+ if len(token) > 255:
+ LOG.info(_LI('Fernet token created with length of %d '
+ 'characters, which exceeds 255 characters'),
+ len(token))
+
return token
def validate_token(self, token):
@@ -181,21 +212,29 @@ class TokenFormatter(object):
elif version == TrustScopedPayload.version:
(user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
TrustScopedPayload.disassemble(payload))
- elif version == FederatedPayload.version:
+ elif version == FederatedUnscopedPayload.version:
(user_id, methods, expires_at, audit_ids, federated_info) = (
- FederatedPayload.disassemble(payload))
+ FederatedUnscopedPayload.disassemble(payload))
+ elif version == FederatedProjectScopedPayload.version:
+ (user_id, methods, project_id, expires_at, audit_ids,
+ federated_info) = FederatedProjectScopedPayload.disassemble(
+ payload)
+ elif version == FederatedDomainScopedPayload.version:
+ (user_id, methods, domain_id, expires_at, audit_ids,
+ federated_info) = FederatedDomainScopedPayload.disassemble(
+ payload)
else:
- # If the token_format is not recognized, raise Unauthorized.
- raise exception.Unauthorized(_(
+ # If the token_format is not recognized, raise ValidationError.
+ raise exception.ValidationError(_(
'This is not a recognized Fernet payload version: %s') %
version)
# rather than appearing in the payload, the creation time is encoded
# into the token format itself
created_at = TokenFormatter.creation_time(token)
- created_at = timeutils.isotime(at=created_at, subsecond=True)
+ created_at = ks_utils.isotime(at=created_at, subsecond=True)
expires_at = timeutils.parse_isotime(expires_at)
- expires_at = timeutils.isotime(at=expires_at, subsecond=True)
+ expires_at = ks_utils.isotime(at=expires_at, subsecond=True)
return (user_id, methods, audit_ids, domain_id, project_id, trust_id,
federated_info, created_at, expires_at)
@@ -273,8 +312,8 @@ class BasePayload(object):
:returns: a time formatted strings
"""
- time_object = datetime.datetime.utcfromtimestamp(int(time_int))
- return timeutils.isotime(time_object)
+ time_object = datetime.datetime.utcfromtimestamp(time_int)
+ return ks_utils.isotime(time_object, subsecond=True)
@classmethod
def attempt_convert_uuid_hex_to_bytes(cls, value):
@@ -319,7 +358,7 @@ class UnscopedPayload(BasePayload):
:returns: the payload of an unscoped token
"""
- b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+ b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
expires_at_int = cls._convert_time_string_to_int(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
@@ -335,7 +374,7 @@ class UnscopedPayload(BasePayload):
audit_ids
"""
- user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+ user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
methods = auth_plugins.convert_integer_to_method_list(payload[1])
expires_at_str = cls._convert_int_to_time_string(payload[2])
audit_ids = list(map(provider.base64_encode, payload[3]))
@@ -357,7 +396,7 @@ class DomainScopedPayload(BasePayload):
:returns: the payload of a domain-scoped token
"""
- b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+ b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
try:
b_domain_id = cls.convert_uuid_hex_to_bytes(domain_id)
@@ -381,7 +420,7 @@ class DomainScopedPayload(BasePayload):
expires_at_str, and audit_ids
"""
- user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+ user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
methods = auth_plugins.convert_integer_to_method_list(payload[1])
try:
domain_id = cls.convert_uuid_bytes_to_hex(payload[2])
@@ -412,9 +451,9 @@ class ProjectScopedPayload(BasePayload):
:returns: the payload of a project-scoped token
"""
- b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+ b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
- b_project_id = cls.convert_uuid_hex_to_bytes(project_id)
+ b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
expires_at_int = cls._convert_time_string_to_int(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
@@ -429,9 +468,9 @@ class ProjectScopedPayload(BasePayload):
expires_at_str, and audit_ids
"""
- user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+ user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
methods = auth_plugins.convert_integer_to_method_list(payload[1])
- project_id = cls.convert_uuid_bytes_to_hex(payload[2])
+ project_id = cls.attempt_convert_uuid_bytes_to_hex(payload[2])
expires_at_str = cls._convert_int_to_time_string(payload[3])
audit_ids = list(map(provider.base64_encode, payload[4]))
@@ -455,9 +494,9 @@ class TrustScopedPayload(BasePayload):
:returns: the payload of a trust-scoped token
"""
- b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+ b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
- b_project_id = cls.convert_uuid_hex_to_bytes(project_id)
+ b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
b_trust_id = cls.convert_uuid_hex_to_bytes(trust_id)
expires_at_int = cls._convert_time_string_to_int(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
@@ -475,9 +514,9 @@ class TrustScopedPayload(BasePayload):
expires_at_str, audit_ids, and trust_id
"""
- user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+ user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
methods = auth_plugins.convert_integer_to_method_list(payload[1])
- project_id = cls.convert_uuid_bytes_to_hex(payload[2])
+ project_id = cls.attempt_convert_uuid_bytes_to_hex(payload[2])
expires_at_str = cls._convert_int_to_time_string(payload[3])
audit_ids = list(map(provider.base64_encode, payload[4]))
trust_id = cls.convert_uuid_bytes_to_hex(payload[5])
@@ -486,10 +525,19 @@ class TrustScopedPayload(BasePayload):
trust_id)
-class FederatedPayload(BasePayload):
+class FederatedUnscopedPayload(BasePayload):
version = 4
@classmethod
+ def pack_group_id(cls, group_dict):
+ return cls.attempt_convert_uuid_hex_to_bytes(group_dict['id'])
+
+ @classmethod
+ def unpack_group_id(cls, group_id_in_bytes):
+ group_id = cls.attempt_convert_uuid_bytes_to_hex(group_id_in_bytes)
+ return {'id': group_id}
+
+ @classmethod
def assemble(cls, user_id, methods, expires_at, audit_ids, federated_info):
"""Assemble the payload of a federated token.
@@ -503,24 +551,24 @@ class FederatedPayload(BasePayload):
:returns: the payload of a federated token
"""
- def pack_group_ids(group_dict):
- return cls.convert_uuid_hex_to_bytes(group_dict['id'])
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
- b_group_ids = map(pack_group_ids, federated_info['group_ids'])
+ b_group_ids = list(map(cls.pack_group_id,
+ federated_info['group_ids']))
b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
federated_info['idp_id'])
protocol_id = federated_info['protocol_id']
expires_at_int = cls._convert_time_string_to_int(expires_at)
- b_audit_ids = map(provider.random_urlsafe_str_to_bytes, audit_ids)
+ b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
+ audit_ids))
return (b_user_id, methods, b_group_ids, b_idp_id, protocol_id,
expires_at_int, b_audit_ids)
@classmethod
def disassemble(cls, payload):
- """Validate a federated paylod.
+ """Validate a federated payload.
:param token_string: a string representing the token
:return: a tuple containing the user_id, auth methods, audit_ids, and
@@ -529,17 +577,81 @@ class FederatedPayload(BasePayload):
federated domain ID
"""
- def unpack_group_ids(group_id_in_bytes):
- group_id = cls.convert_uuid_bytes_to_hex(group_id_in_bytes)
- return {'id': group_id}
user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
methods = auth_plugins.convert_integer_to_method_list(payload[1])
- group_ids = map(unpack_group_ids, payload[2])
+ group_ids = list(map(cls.unpack_group_id, payload[2]))
idp_id = cls.attempt_convert_uuid_bytes_to_hex(payload[3])
protocol_id = payload[4]
expires_at_str = cls._convert_int_to_time_string(payload[5])
- audit_ids = map(provider.base64_encode, payload[6])
+ audit_ids = list(map(provider.base64_encode, payload[6]))
federated_info = dict(group_ids=group_ids, idp_id=idp_id,
protocol_id=protocol_id)
return (user_id, methods, expires_at_str, audit_ids, federated_info)
+
+
+class FederatedScopedPayload(FederatedUnscopedPayload):
+ version = None
+
+ @classmethod
+ def assemble(cls, user_id, methods, scope_id, expires_at, audit_ids,
+ federated_info):
+ """Assemble the project-scoped payload of a federated token.
+
+ :param user_id: ID of the user in the token request
+ :param methods: list of authentication methods used
+ :param scope_id: ID of the project or domain ID to scope to
+ :param expires_at: datetime of the token's expiration
+ :param audit_ids: list of the token's audit IDs
+ :param federated_info: dictionary containing the identity provider ID,
+ protocol ID, federated domain ID and group IDs
+ :returns: the payload of a federated token
+
+ """
+ b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
+ methods = auth_plugins.convert_method_list_to_integer(methods)
+ b_scope_id = cls.attempt_convert_uuid_hex_to_bytes(scope_id)
+ b_group_ids = list(map(cls.pack_group_id,
+ federated_info['group_ids']))
+ b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
+ federated_info['idp_id'])
+ protocol_id = federated_info['protocol_id']
+ expires_at_int = cls._convert_time_string_to_int(expires_at)
+ b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
+ audit_ids))
+
+ return (b_user_id, methods, b_scope_id, b_group_ids, b_idp_id,
+ protocol_id, expires_at_int, b_audit_ids)
+
+ @classmethod
+ def disassemble(cls, payload):
+ """Validate a project-scoped federated payload.
+
+ :param token_string: a string representing the token
+ :returns: a tuple containing the user_id, auth methods, scope_id,
+ expiration time (as str), audit_ids, and a dictionary
+                  containing federated information such as the identity
+ provider ID, the protocol ID, the federated domain ID and
+ group IDs
+
+ """
+ user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
+ methods = auth_plugins.convert_integer_to_method_list(payload[1])
+ scope_id = cls.attempt_convert_uuid_bytes_to_hex(payload[2])
+ group_ids = list(map(cls.unpack_group_id, payload[3]))
+ idp_id = cls.attempt_convert_uuid_bytes_to_hex(payload[4])
+ protocol_id = payload[5]
+ expires_at_str = cls._convert_int_to_time_string(payload[6])
+ audit_ids = list(map(provider.base64_encode, payload[7]))
+ federated_info = dict(idp_id=idp_id, protocol_id=protocol_id,
+ group_ids=group_ids)
+ return (user_id, methods, scope_id, expires_at_str, audit_ids,
+ federated_info)
+
+
+class FederatedProjectScopedPayload(FederatedScopedPayload):
+ version = 5
+
+
+class FederatedDomainScopedPayload(FederatedScopedPayload):
+ version = 6
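
Several payload classes above switch from convert_uuid_hex_to_bytes to attempt_convert_uuid_hex_to_bytes so that non-UUID identifiers (LDAP user IDs, federated group IDs) still survive the msgpack round trip. A simplified sketch of that fallback, with the uuid module standing in for the BasePayload helpers; the packed tuple layout here is illustrative.

    import uuid


    def attempt_convert_uuid_hex_to_bytes(value):
        # Pack 32-char hex IDs into 16 raw bytes to shrink the token;
        # anything else passes through unchanged, with a flag recording
        # which form was used.
        try:
            return (True, uuid.UUID(hex=value).bytes)
        except ValueError:
            # Not a UUID (for example an LDAP DN or a federated group ID).
            return (False, value)


    def attempt_convert_uuid_bytes_to_hex(packed):
        is_uuid, value = packed
        return uuid.UUID(bytes=value).hex if is_uuid else value


    packed = attempt_convert_uuid_hex_to_bytes(uuid.uuid4().hex)
    assert len(attempt_convert_uuid_bytes_to_hex(packed)) == 32

    packed = attempt_convert_uuid_hex_to_bytes('cn=user,dc=example,dc=org')
    assert attempt_convert_uuid_bytes_to_hex(packed) == 'cn=user,dc=example,dc=org'
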
diff --git a/keystone-moon/keystone/token/providers/fernet/utils.py b/keystone-moon/keystone/token/providers/fernet/utils.py
index 56624ee5..4235eda8 100644
--- a/keystone-moon/keystone/token/providers/fernet/utils.py
+++ b/keystone-moon/keystone/token/providers/fernet/utils.py
@@ -59,8 +59,8 @@ def _convert_to_integers(id_value):
try:
id_int = int(id_value)
except ValueError as e:
- msg = ('Unable to convert Keystone user or group ID. Error: %s', e)
- LOG.error(msg)
+ msg = _LE('Unable to convert Keystone user or group ID. Error: %s')
+ LOG.error(msg, e)
raise
return id_int
@@ -174,11 +174,16 @@ def rotate_keys(keystone_user_id=None, keystone_group_id=None):
for filename in os.listdir(CONF.fernet_tokens.key_repository):
path = os.path.join(CONF.fernet_tokens.key_repository, str(filename))
if os.path.isfile(path):
- key_files[int(filename)] = path
+ try:
+ key_id = int(filename)
+ except ValueError:
+ pass
+ else:
+ key_files[key_id] = path
LOG.info(_LI('Starting key rotation with %(count)s key files: %(list)s'), {
'count': len(key_files),
- 'list': key_files.values()})
+ 'list': list(key_files.values())})
# determine the number of the new primary key
current_primary_key = max(key_files.keys())
@@ -199,20 +204,24 @@ def rotate_keys(keystone_user_id=None, keystone_group_id=None):
# add a new key to the rotation, which will be the *next* primary
_create_new_key(keystone_user_id, keystone_group_id)
+ max_active_keys = CONF.fernet_tokens.max_active_keys
# check for bad configuration
- if CONF.fernet_tokens.max_active_keys < 1:
+ if max_active_keys < 1:
LOG.warning(_LW(
'[fernet_tokens] max_active_keys must be at least 1 to maintain a '
'primary key.'))
- CONF.fernet_tokens.max_active_keys = 1
+ max_active_keys = 1
# purge excess keys
- keys = sorted(key_files.keys())
- excess_keys = (
- keys[:len(key_files) - CONF.fernet_tokens.max_active_keys + 1])
- LOG.info(_LI('Excess keys to purge: %s'), excess_keys)
- for i in excess_keys:
- os.remove(key_files[i])
+
+ # Note that key_files doesn't contain the new active key that was created,
+ # only the old active keys.
+ keys = sorted(key_files.keys(), reverse=True)
+ while len(keys) > (max_active_keys - 1):
+ index_to_purge = keys.pop()
+ key_to_purge = key_files[index_to_purge]
+ LOG.info(_LI('Excess key to purge: %s'), key_to_purge)
+ os.remove(key_to_purge)
def load_keys():
@@ -232,12 +241,25 @@ def load_keys():
path = os.path.join(CONF.fernet_tokens.key_repository, str(filename))
if os.path.isfile(path):
with open(path, 'r') as key_file:
- keys[int(filename)] = key_file.read()
-
- LOG.info(_LI(
- 'Loaded %(count)s encryption keys from: %(dir)s'), {
- 'count': len(keys),
- 'dir': CONF.fernet_tokens.key_repository})
+ try:
+ key_id = int(filename)
+ except ValueError:
+ pass
+ else:
+ keys[key_id] = key_file.read()
+
+ if len(keys) != CONF.fernet_tokens.max_active_keys:
+ # If there haven't been enough key rotations to reach max_active_keys,
+ # or if the configured value of max_active_keys has changed since the
+ # last rotation, then reporting the discrepancy might be useful. Once
+ # the number of keys matches max_active_keys, this log entry is too
+ # repetitive to be useful.
+ LOG.info(_LI(
+ 'Loaded %(count)d encryption keys (max_active_keys=%(max)d) from: '
+ '%(dir)s'), {
+ 'count': len(keys),
+ 'max': CONF.fernet_tokens.max_active_keys,
+ 'dir': CONF.fernet_tokens.key_repository})
# return the encryption_keys, sorted by key number, descending
return [keys[x] for x in sorted(keys.keys(), reverse=True)]
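
The rewritten purge loop in rotate_keys keeps the highest-numbered key files and deletes the oldest until only max_active_keys - 1 of the old keys remain, since the freshly staged key occupies the final slot. Below is a small dry-run sketch of that selection over plain integers; no files are touched and keys_to_purge is a hypothetical helper name, not part of Keystone.

    def keys_to_purge(existing_key_indices, max_active_keys):
        # Clamp bad configuration the same way rotate_keys now does.
        max_active_keys = max(max_active_keys, 1)
        keys = sorted(existing_key_indices, reverse=True)
        purged = []
        # The freshly staged key is not in the list yet, so only
        # max_active_keys - 1 of the old keys may survive.
        while len(keys) > (max_active_keys - 1):
            purged.append(keys.pop())  # pop() drops the lowest (oldest) index
        return purged


    # With keys 0..4 on disk and max_active_keys=3, keys 0, 1 and 2 are purged,
    # leaving keys 3 and 4 plus the newly staged key.
    print(keys_to_purge([0, 1, 2, 3, 4], max_active_keys=3))
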
diff --git a/keystone-moon/keystone/token/providers/pki.py b/keystone-moon/keystone/token/providers/pki.py
index 61b42817..af8dc739 100644
--- a/keystone-moon/keystone/token/providers/pki.py
+++ b/keystone-moon/keystone/token/providers/pki.py
@@ -48,6 +48,14 @@ class Provider(common.BaseProvider):
raise exception.UnexpectedError(_(
'Unable to sign token.'))
+ @property
+ def _supports_bind_authentication(self):
+ """Return if the token provider supports bind authentication methods.
+
+ :returns: True
+ """
+ return True
+
def needs_persistence(self):
"""Should the token be written to a backend."""
return True
diff --git a/keystone-moon/keystone/token/providers/pkiz.py b/keystone-moon/keystone/token/providers/pkiz.py
index b6f2944d..b4e31918 100644
--- a/keystone-moon/keystone/token/providers/pkiz.py
+++ b/keystone-moon/keystone/token/providers/pkiz.py
@@ -46,6 +46,14 @@ class Provider(common.BaseProvider):
LOG.exception(ERROR_MESSAGE)
raise exception.UnexpectedError(ERROR_MESSAGE)
+ @property
+ def _supports_bind_authentication(self):
+ """Return if the token provider supports bind authentication methods.
+
+ :returns: True
+ """
+ return True
+
def needs_persistence(self):
"""Should the token be written to a backend."""
return True
diff --git a/keystone-moon/keystone/token/providers/uuid.py b/keystone-moon/keystone/token/providers/uuid.py
index 15118d82..f9a91617 100644
--- a/keystone-moon/keystone/token/providers/uuid.py
+++ b/keystone-moon/keystone/token/providers/uuid.py
@@ -28,6 +28,14 @@ class Provider(common.BaseProvider):
def _get_token_id(self, token_data):
return uuid.uuid4().hex
+ @property
+ def _supports_bind_authentication(self):
+ """Return if the token provider supports bind authentication methods.
+
+ :returns: True
+ """
+ return True
+
def needs_persistence(self):
"""Should the token be written to a backend."""
return True
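
The _supports_bind_authentication property added to the pki, pkiz and uuid providers pairs with the bind check introduced in BaseProvider.issue_v3_token in common.py: a provider that cannot encode bind data, such as Fernet, now fails fast. A condensed sketch of that interaction follows; the classes are stand-ins and the builtin NotImplementedError replaces keystone.exception.NotImplemented.

    class BaseProvider(object):
        # Default mirrors the persistence-backed providers; Fernet overrides it.
        _supports_bind_authentication = True

        def issue_v3_token(self, user_id, method_names, auth_context=None):
            if auth_context and auth_context.get('bind'):
                if not self._supports_bind_authentication:
                    raise NotImplementedError(
                        'The configured token provider does not support bind '
                        'authentication.')
            return 'token-id-for-%s' % user_id


    class UUIDProvider(BaseProvider):
        _supports_bind_authentication = True


    class FernetProvider(BaseProvider):
        # The Fernet payload formats have no field for bind data.
        _supports_bind_authentication = False


    ctx = {'bind': {'kerberos': 'user@EXAMPLE.COM'}}
    print(UUIDProvider().issue_v3_token('u1', ['password'], auth_context=ctx))
    try:
        FernetProvider().issue_v3_token('u1', ['password'], auth_context=ctx)
    except NotImplementedError as exc:
        print(exc)
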