Diffstat (limited to 'keystone-moon/keystone/token/providers/fernet')
3 files changed, 318 insertions, 157 deletions
diff --git a/keystone-moon/keystone/token/providers/fernet/core.py b/keystone-moon/keystone/token/providers/fernet/core.py
index b1da263b..1bbacb03 100644
--- a/keystone-moon/keystone/token/providers/fernet/core.py
+++ b/keystone-moon/keystone/token/providers/fernet/core.py
@@ -14,7 +14,8 @@
 from oslo_config import cfg
 from oslo_log import log
 from keystone.common import dependency
-from keystone.contrib import federation
+from keystone.common import utils as ks_utils
+from keystone.contrib.federation import constants as federation_constants
 from keystone import exception
 from keystone.i18n import _
 from keystone.token import provider
@@ -59,6 +60,9 @@ class Provider(common.BaseProvider):
         if token_ref.get('tenant'):
             project_id = token_ref['tenant']['id']
 
+        # maintain expiration time across rescopes
+        expires = token_ref.get('expires')
+
         parent_audit_id = token_ref.get('parent_audit_id')
         # If parent_audit_id is defined then a token authentication was made
         if parent_audit_id:
@@ -80,136 +84,132 @@ class Provider(common.BaseProvider):
             project_id=project_id,
             token=token_ref,
             include_catalog=False,
-            audit_info=audit_ids)
+            audit_info=audit_ids,
+            expires=expires)
 
         expires_at = v3_token_data['token']['expires_at']
         token_id = self.token_formatter.create_token(user_id, expires_at,
                                                      audit_ids,
                                                      methods=method_names,
                                                      project_id=project_id)
+        self._build_issued_at_info(token_id, v3_token_data)
         # Convert v3 to v2 token data and build v2 catalog
-        token_data = self.v2_token_data_helper.v3_to_v2_token(token_id,
-                                                              v3_token_data)
+        token_data = self.v2_token_data_helper.v3_to_v2_token(v3_token_data)
+        token_data['access']['token']['id'] = token_id
+
+        return token_id, token_data
 
+    def issue_v3_token(self, *args, **kwargs):
+        token_id, token_data = super(Provider, self).issue_v3_token(
+            *args, **kwargs)
+        self._build_issued_at_info(token_id, token_data)
         return token_id, token_data
 
+    def _build_issued_at_info(self, token_id, token_data):
+        # NOTE(roxanaghe, lbragstad): We must use the creation time that
+        # Fernet builds into it's token. The Fernet spec details that the
+        # token creation time is built into the token, outside of the payload
+        # provided by Keystone. This is the reason why we don't pass the
+        # issued_at time in the payload. This also means that we shouldn't
+        # return a token reference with a creation time that we created
+        # when Fernet uses a different creation time. We should use the
+        # creation time provided by Fernet because it's the creation time
+        # that we have to rely on when we validate the token.
+        fernet_creation_datetime_obj = self.token_formatter.creation_time(
+            token_id)
+        token_data['token']['issued_at'] = ks_utils.isotime(
+            at=fernet_creation_datetime_obj, subsecond=True)
+
     def _build_federated_info(self, token_data):
         """Extract everything needed for federated tokens.
 
-        This dictionary is passed to the FederatedPayload token formatter,
-        which unpacks the values and builds the Fernet token.
+        This dictionary is passed to federated token formatters, which unpack
+        the values and build federated Fernet tokens.
 
         """
-        group_ids = token_data.get('user', {}).get(
-            federation.FEDERATION, {}).get('groups')
-        idp_id = token_data.get('user', {}).get(
-            federation.FEDERATION, {}).get('identity_provider', {}).get('id')
-        protocol_id = token_data.get('user', {}).get(
-            federation.FEDERATION, {}).get('protocol', {}).get('id')
-        if not group_ids:
-            group_ids = list()
-        federated_dict = dict(group_ids=group_ids, idp_id=idp_id,
-                              protocol_id=protocol_id)
-        return federated_dict
+        idp_id = token_data['token'].get('user', {}).get(
+            federation_constants.FEDERATION, {}).get(
+                'identity_provider', {}).get('id')
+        protocol_id = token_data['token'].get('user', {}).get(
+            federation_constants.FEDERATION, {}).get('protocol', {}).get('id')
+        # If we don't have an identity provider ID and a protocol ID, it's safe
+        # to assume we aren't dealing with a federated token.
+        if not (idp_id and protocol_id):
+            return None
+
+        group_ids = token_data['token'].get('user', {}).get(
+            federation_constants.FEDERATION, {}).get('groups')
+
+        return {'group_ids': group_ids,
+                'idp_id': idp_id,
+                'protocol_id': protocol_id}
 
     def _rebuild_federated_info(self, federated_dict, user_id):
         """Format federated information into the token reference.
 
-        The federated_dict is passed back from the FederatedPayload token
-        formatter. The responsibility of this method is to format the
-        information passed back from the token formatter into the token
-        reference before constructing the token data from the
-        V3TokenDataHelper.
+        The federated_dict is passed back from the federated token formatters.
+        The responsibility of this method is to format the information passed
+        back from the token formatter into the token reference before
+        constructing the token data from the V3TokenDataHelper.
 
         """
         g_ids = federated_dict['group_ids']
         idp_id = federated_dict['idp_id']
         protocol_id = federated_dict['protocol_id']
-        federated_info = dict(groups=g_ids,
-                              identity_provider=dict(id=idp_id),
-                              protocol=dict(id=protocol_id))
-        token_dict = {'user': {federation.FEDERATION: federated_info}}
-        token_dict['user']['id'] = user_id
-        token_dict['user']['name'] = user_id
+
+        federated_info = {
+            'groups': g_ids,
+            'identity_provider': {'id': idp_id},
+            'protocol': {'id': protocol_id}
+        }
+
+        token_dict = {
+            'user': {
+                federation_constants.FEDERATION: federated_info,
+                'id': user_id,
+                'name': user_id
+            }
+        }
+
         return token_dict
 
-    def issue_v3_token(self, user_id, method_names, expires_at=None,
-                       project_id=None, domain_id=None, auth_context=None,
-                       trust=None, metadata_ref=None, include_catalog=True,
-                       parent_audit_id=None):
-        """Issue a V3 formatted token.
-
-        Here is where we need to detect what is given to us, and what kind of
-        token the user is expecting. Depending on the outcome of that, we can
-        pass all the information to be packed to the proper token format
-        handler.
-
-        :param user_id: ID of the user
-        :param method_names: method of authentication
-        :param expires_at: token expiration time
-        :param project_id: ID of the project being scoped to
-        :param domain_id: ID of the domain being scoped to
-        :param auth_context: authentication context
-        :param trust: ID of the trust
-        :param metadata_ref: metadata reference
-        :param include_catalog: return the catalog in the response if True,
-                                otherwise don't return the catalog
-        :param parent_audit_id: ID of the patent audit entity
-        :returns: tuple containing the id of the token and the token data
+    def _rebuild_federated_token_roles(self, token_dict, federated_dict,
+                                       user_id, project_id, domain_id):
+        """Populate roles based on (groups, project/domain) pair.
 
-        """
-        # TODO(lbragstad): Currently, Fernet tokens don't support bind in the
-        # token format. Raise a 501 if we're dealing with bind.
-        if auth_context.get('bind'):
-            raise exception.NotImplemented()
+        We must populate roles from (groups, project/domain) as ephemeral users
+        don't exist in the backend. Upon success, a ``roles`` key will be added
+        to ``token_dict``.
 
-        token_ref = None
-        # NOTE(lbragstad): This determines if we are dealing with a federated
-        # token or not. The groups for the user will be in the returned token
-        # reference.
-        federated_dict = None
-        if auth_context and self._is_mapped_token(auth_context):
-            token_ref = self._handle_mapped_tokens(
-                auth_context, project_id, domain_id)
-            federated_dict = self._build_federated_info(token_ref)
-
-        token_data = self.v3_token_data_helper.get_token_data(
-            user_id,
-            method_names,
-            auth_context.get('extras') if auth_context else None,
-            domain_id=domain_id,
-            project_id=project_id,
-            expires=expires_at,
-            trust=trust,
-            bind=auth_context.get('bind') if auth_context else None,
-            token=token_ref,
-            include_catalog=include_catalog,
-            audit_info=parent_audit_id)
+        :param token_dict: dictionary with data used for building token
+        :param federated_dict: federated information such as identity provider
+            protocol and set of group IDs
+        :param user_id: user ID
+        :param project_id: project ID the token is being scoped to
+        :param domain_id: domain ID the token is being scoped to
 
-        token = self.token_formatter.create_token(
-            user_id,
-            token_data['token']['expires_at'],
-            token_data['token']['audit_ids'],
-            methods=method_names,
-            domain_id=domain_id,
-            project_id=project_id,
-            trust_id=token_data['token'].get('OS-TRUST:trust', {}).get('id'),
-            federated_info=federated_dict)
-        return token, token_data
+        """
+        group_ids = [x['id'] for x in federated_dict['group_ids']]
+        self.v3_token_data_helper.populate_roles_for_groups(
+            token_dict, group_ids, project_id, domain_id, user_id)
 
     def validate_v2_token(self, token_ref):
         """Validate a V2 formatted token.
 
         :param token_ref: reference describing the token to validate
         :returns: the token data
+        :raises keystone.exception.TokenNotFound: if token format is invalid
         :raises keystone.exception.Unauthorized: if v3 token is used
 
         """
-        (user_id, methods,
-         audit_ids, domain_id,
-         project_id, trust_id,
-         federated_info, created_at,
-         expires_at) = self.token_formatter.validate_token(token_ref)
+        try:
+            (user_id, methods,
+             audit_ids, domain_id,
+             project_id, trust_id,
+             federated_info, created_at,
+             expires_at) = self.token_formatter.validate_token(token_ref)
+        except exception.ValidationError as e:
+            raise exception.TokenNotFound(e)
 
         if trust_id or domain_id or federated_info:
             msg = _('This is not a v2.0 Fernet token. Use v3 for trust, '
@@ -225,26 +225,36 @@ class Provider(common.BaseProvider):
             token=token_ref,
             include_catalog=False,
             audit_info=audit_ids)
-        return self.v2_token_data_helper.v3_to_v2_token(token_ref,
-                                                        v3_token_data)
+        token_data = self.v2_token_data_helper.v3_to_v2_token(v3_token_data)
+        token_data['access']['token']['id'] = token_ref
+        return token_data
 
     def validate_v3_token(self, token):
         """Validate a V3 formatted token.
 
         :param token: a string describing the token to validate
         :returns: the token data
-        :raises keystone.exception.Unauthorized: if token format version isn't
+        :raises keystone.exception.TokenNotFound: if token format version isn't
            supported
 
         """
-        (user_id, methods, audit_ids, domain_id, project_id, trust_id,
-            federated_info, created_at, expires_at) = (
-                self.token_formatter.validate_token(token))
+        try:
+            (user_id, methods, audit_ids, domain_id, project_id, trust_id,
+             federated_info, created_at, expires_at) = (
+                self.token_formatter.validate_token(token))
+        except exception.ValidationError as e:
+            raise exception.TokenNotFound(e)
 
         token_dict = None
+        trust_ref = None
         if federated_info:
             token_dict = self._rebuild_federated_info(federated_info, user_id)
-        trust_ref = self.trust_api.get_trust(trust_id)
+            if project_id or domain_id:
+                self._rebuild_federated_token_roles(token_dict, federated_info,
+                                                    user_id, project_id,
+                                                    domain_id)
+        if trust_id:
+            trust_ref = self.trust_api.get_trust(trust_id)
 
         return self.v3_token_data_helper.get_token_data(
             user_id,
@@ -264,4 +274,21 @@ class Provider(common.BaseProvider):
         :type token_data: dict
         :raises keystone.exception.NotImplemented: when called
         """
-        raise exception.NotImplemented()
+        return self.token_formatter.create_token(
+            token_data['token']['user']['id'],
+            token_data['token']['expires_at'],
+            token_data['token']['audit_ids'],
+            methods=token_data['token'].get('methods'),
+            domain_id=token_data['token'].get('domain', {}).get('id'),
+            project_id=token_data['token'].get('project', {}).get('id'),
+            trust_id=token_data['token'].get('OS-TRUST:trust', {}).get('id'),
+            federated_info=self._build_federated_info(token_data)
+        )
+
+    @property
+    def _supports_bind_authentication(self):
+        """Return if the token provider supports bind authentication methods.
+
+        :returns: False
+        """
+        return False
diff --git a/keystone-moon/keystone/token/providers/fernet/token_formatters.py b/keystone-moon/keystone/token/providers/fernet/token_formatters.py
index 50960923..d1dbb08c 100644
--- a/keystone-moon/keystone/token/providers/fernet/token_formatters.py
+++ b/keystone-moon/keystone/token/providers/fernet/token_formatters.py
@@ -21,11 +21,12 @@
 from oslo_config import cfg
 from oslo_log import log
 from oslo_utils import timeutils
 import six
-from six.moves import urllib
+from six.moves import map, urllib
 
 from keystone.auth import plugins as auth_plugins
+from keystone.common import utils as ks_utils
 from keystone import exception
-from keystone.i18n import _
+from keystone.i18n import _, _LI
 from keystone.token import provider
 from keystone.token.providers.fernet import utils
@@ -60,7 +61,7 @@ class TokenFormatter(object):
         if not keys:
             raise exception.KeysNotFound()
 
-        fernet_instances = [fernet.Fernet(key) for key in utils.load_keys()]
+        fernet_instances = [fernet.Fernet(key) for key in keys]
         return fernet.MultiFernet(fernet_instances)
 
     def pack(self, payload):
@@ -74,8 +75,9 @@
         try:
             return self.crypto.decrypt(token)
-        except fernet.InvalidToken as e:
-            raise exception.Unauthorized(six.text_type(e))
+        except fernet.InvalidToken:
+            raise exception.ValidationError(
+                _('This is not a recognized Fernet token'))
 
     @classmethod
     def creation_time(cls, fernet_token):
@@ -115,9 +117,27 @@
                 expires_at,
                 audit_ids,
                 trust_id)
+        elif project_id and federated_info:
+            version = FederatedProjectScopedPayload.version
+            payload = FederatedProjectScopedPayload.assemble(
+                user_id,
+                methods,
+                project_id,
+                expires_at,
+                audit_ids,
+                federated_info)
+        elif domain_id and federated_info:
+            version = FederatedDomainScopedPayload.version
+            payload = FederatedDomainScopedPayload.assemble(
+                user_id,
+                methods,
+                domain_id,
+                expires_at,
+                audit_ids,
+                federated_info)
         elif federated_info:
-            version = FederatedPayload.version
-            payload = FederatedPayload.assemble(
+            version = FederatedUnscopedPayload.version
+            payload = FederatedUnscopedPayload.assemble(
                 user_id,
                 methods,
                 expires_at,
@@ -151,6 +171,17 @@
         serialized_payload = msgpack.packb(versioned_payload)
         token = self.pack(serialized_payload)
 
+        # NOTE(lbragstad): We should warn against Fernet tokens that are over
+        # 255 characters in length. This is mostly due to persisting the tokens
+        # in a backend store of some kind that might have a limit of 255
+        # characters. Even though Keystone isn't storing a Fernet token
+        # anywhere, we can't say it isn't being stored somewhere else with
+        # those kind of backend constraints.
+        if len(token) > 255:
+            LOG.info(_LI('Fernet token created with length of %d '
+                         'characters, which exceeds 255 characters'),
+                     len(token))
+
         return token
 
     def validate_token(self, token):
@@ -181,21 +212,29 @@
         elif version == TrustScopedPayload.version:
             (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
                 TrustScopedPayload.disassemble(payload))
-        elif version == FederatedPayload.version:
+        elif version == FederatedUnscopedPayload.version:
             (user_id, methods, expires_at, audit_ids, federated_info) = (
-                FederatedPayload.disassemble(payload))
+                FederatedUnscopedPayload.disassemble(payload))
+        elif version == FederatedProjectScopedPayload.version:
+            (user_id, methods, project_id, expires_at, audit_ids,
+             federated_info) = FederatedProjectScopedPayload.disassemble(
+                payload)
+        elif version == FederatedDomainScopedPayload.version:
+            (user_id, methods, domain_id, expires_at, audit_ids,
+             federated_info) = FederatedDomainScopedPayload.disassemble(
+                payload)
         else:
-            # If the token_format is not recognized, raise Unauthorized.
-            raise exception.Unauthorized(_(
+            # If the token_format is not recognized, raise ValidationError.
+            raise exception.ValidationError(_(
                 'This is not a recognized Fernet payload version: %s') %
                 version)
 
         # rather than appearing in the payload, the creation time is encoded
         # into the token format itself
         created_at = TokenFormatter.creation_time(token)
-        created_at = timeutils.isotime(at=created_at, subsecond=True)
+        created_at = ks_utils.isotime(at=created_at, subsecond=True)
         expires_at = timeutils.parse_isotime(expires_at)
-        expires_at = timeutils.isotime(at=expires_at, subsecond=True)
+        expires_at = ks_utils.isotime(at=expires_at, subsecond=True)
 
         return (user_id, methods, audit_ids, domain_id, project_id, trust_id,
                 federated_info, created_at, expires_at)
@@ -273,8 +312,8 @@ class BasePayload(object):
         :returns: a time formatted strings
 
         """
-        time_object = datetime.datetime.utcfromtimestamp(int(time_int))
-        return timeutils.isotime(time_object)
+        time_object = datetime.datetime.utcfromtimestamp(time_int)
+        return ks_utils.isotime(time_object, subsecond=True)
 
     @classmethod
     def attempt_convert_uuid_hex_to_bytes(cls, value):
@@ -319,7 +358,7 @@
         :returns: the payload of an unscoped token
 
         """
-        b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
         methods = auth_plugins.convert_method_list_to_integer(methods)
         expires_at_int = cls._convert_time_string_to_int(expires_at)
         b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
                                audit_ids))
@@ -335,7 +374,7 @@
                   audit_ids
 
         """
-        user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+        user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
         methods = auth_plugins.convert_integer_to_method_list(payload[1])
         expires_at_str = cls._convert_int_to_time_string(payload[2])
         audit_ids = list(map(provider.base64_encode, payload[3]))
@@ -357,7 +396,7 @@
         :returns: the payload of a domain-scoped token
 
         """
-        b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
         methods = auth_plugins.convert_method_list_to_integer(methods)
         try:
             b_domain_id = cls.convert_uuid_hex_to_bytes(domain_id)
@@ -381,7 +420,7 @@
                   expires_at_str, and audit_ids
 
         """
-        user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+        user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
         methods = auth_plugins.convert_integer_to_method_list(payload[1])
         try:
             domain_id = cls.convert_uuid_bytes_to_hex(payload[2])
@@ -412,9 +451,9 @@
         :returns: the payload of a project-scoped token
 
         """
-        b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
         methods = auth_plugins.convert_method_list_to_integer(methods)
-        b_project_id = cls.convert_uuid_hex_to_bytes(project_id)
+        b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
         expires_at_int = cls._convert_time_string_to_int(expires_at)
         b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
                                audit_ids))
@@ -429,9 +468,9 @@
                   expires_at_str, and audit_ids
 
         """
-        user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+        user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
         methods = auth_plugins.convert_integer_to_method_list(payload[1])
-        project_id = cls.convert_uuid_bytes_to_hex(payload[2])
+        project_id = cls.attempt_convert_uuid_bytes_to_hex(payload[2])
         expires_at_str = cls._convert_int_to_time_string(payload[3])
         audit_ids = list(map(provider.base64_encode, payload[4]))
@@ -455,9 +494,9 @@
         :returns: the payload of a trust-scoped token
 
         """
-        b_user_id = cls.convert_uuid_hex_to_bytes(user_id)
+        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
         methods = auth_plugins.convert_method_list_to_integer(methods)
-        b_project_id = cls.convert_uuid_hex_to_bytes(project_id)
+        b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
         b_trust_id = cls.convert_uuid_hex_to_bytes(trust_id)
         expires_at_int = cls._convert_time_string_to_int(expires_at)
         b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
@@ -475,9 +514,9 @@
                   expires_at_str, audit_ids, and trust_id
 
         """
-        user_id = cls.convert_uuid_bytes_to_hex(payload[0])
+        user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
         methods = auth_plugins.convert_integer_to_method_list(payload[1])
-        project_id = cls.convert_uuid_bytes_to_hex(payload[2])
+        project_id = cls.attempt_convert_uuid_bytes_to_hex(payload[2])
         expires_at_str = cls._convert_int_to_time_string(payload[3])
         audit_ids = list(map(provider.base64_encode, payload[4]))
         trust_id = cls.convert_uuid_bytes_to_hex(payload[5])
@@ -486,10 +525,19 @@
 
 
-class FederatedPayload(BasePayload):
+class FederatedUnscopedPayload(BasePayload):
     version = 4
 
     @classmethod
+    def pack_group_id(cls, group_dict):
+        return cls.attempt_convert_uuid_hex_to_bytes(group_dict['id'])
+
+    @classmethod
+    def unpack_group_id(cls, group_id_in_bytes):
+        group_id = cls.attempt_convert_uuid_bytes_to_hex(group_id_in_bytes)
+        return {'id': group_id}
+
+    @classmethod
     def assemble(cls, user_id, methods, expires_at, audit_ids,
                  federated_info):
         """Assemble the payload of a federated token.
@@ -503,24 +551,24 @@
         :returns: the payload of a federated token
 
         """
-        def pack_group_ids(group_dict):
-            return cls.convert_uuid_hex_to_bytes(group_dict['id'])
 
         b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
         methods = auth_plugins.convert_method_list_to_integer(methods)
-        b_group_ids = map(pack_group_ids, federated_info['group_ids'])
+        b_group_ids = list(map(cls.pack_group_id,
+                               federated_info['group_ids']))
         b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
             federated_info['idp_id'])
         protocol_id = federated_info['protocol_id']
         expires_at_int = cls._convert_time_string_to_int(expires_at)
-        b_audit_ids = map(provider.random_urlsafe_str_to_bytes, audit_ids)
+        b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
+                               audit_ids))
 
         return (b_user_id, methods, b_group_ids, b_idp_id, protocol_id,
                 expires_at_int, b_audit_ids)
 
     @classmethod
     def disassemble(cls, payload):
-        """Validate a federated paylod.
+        """Validate a federated payload.
 
         :param token_string: a string representing the token
         :return: a tuple containing the user_id, auth methods, audit_ids, and
@@ -529,17 +577,81 @@
            federated domain ID
 
         """
-        def unpack_group_ids(group_id_in_bytes):
-            group_id = cls.convert_uuid_bytes_to_hex(group_id_in_bytes)
-            return {'id': group_id}
 
         user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
         methods = auth_plugins.convert_integer_to_method_list(payload[1])
-        group_ids = map(unpack_group_ids, payload[2])
+        group_ids = list(map(cls.unpack_group_id, payload[2]))
         idp_id = cls.attempt_convert_uuid_bytes_to_hex(payload[3])
         protocol_id = payload[4]
         expires_at_str = cls._convert_int_to_time_string(payload[5])
-        audit_ids = map(provider.base64_encode, payload[6])
+        audit_ids = list(map(provider.base64_encode, payload[6]))
         federated_info = dict(group_ids=group_ids, idp_id=idp_id,
                               protocol_id=protocol_id)
         return (user_id, methods, expires_at_str, audit_ids, federated_info)
+
+
+class FederatedScopedPayload(FederatedUnscopedPayload):
+    version = None
+
+    @classmethod
+    def assemble(cls, user_id, methods, scope_id, expires_at, audit_ids,
+                 federated_info):
+        """Assemble the project-scoped payload of a federated token.
+
+        :param user_id: ID of the user in the token request
+        :param methods: list of authentication methods used
+        :param scope_id: ID of the project or domain ID to scope to
+        :param expires_at: datetime of the token's expiration
+        :param audit_ids: list of the token's audit IDs
+        :param federated_info: dictionary containing the identity provider ID,
+            protocol ID, federated domain ID and group IDs
+        :returns: the payload of a federated token
+
+        """
+        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
+        methods = auth_plugins.convert_method_list_to_integer(methods)
+        b_scope_id = cls.attempt_convert_uuid_hex_to_bytes(scope_id)
+        b_group_ids = list(map(cls.pack_group_id,
+                               federated_info['group_ids']))
+        b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
+            federated_info['idp_id'])
+        protocol_id = federated_info['protocol_id']
+        expires_at_int = cls._convert_time_string_to_int(expires_at)
+        b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
+                               audit_ids))
+
+        return (b_user_id, methods, b_scope_id, b_group_ids, b_idp_id,
+                protocol_id, expires_at_int, b_audit_ids)
+
+    @classmethod
+    def disassemble(cls, payload):
+        """Validate a project-scoped federated payload.
+
+        :param token_string: a string representing the token
+        :returns: a tuple containing the user_id, auth methods, scope_id,
+                  expiration time (as str), audit_ids, and a dictionary
+                  containing federated information such as the the identity
+                  provider ID, the protocol ID, the federated domain ID and
+                  group IDs
+
+        """
+        user_id = cls.attempt_convert_uuid_bytes_to_hex(payload[0])
+        methods = auth_plugins.convert_integer_to_method_list(payload[1])
+        scope_id = cls.attempt_convert_uuid_bytes_to_hex(payload[2])
+        group_ids = list(map(cls.unpack_group_id, payload[3]))
+        idp_id = cls.attempt_convert_uuid_bytes_to_hex(payload[4])
+        protocol_id = payload[5]
+        expires_at_str = cls._convert_int_to_time_string(payload[6])
+        audit_ids = list(map(provider.base64_encode, payload[7]))
+        federated_info = dict(idp_id=idp_id, protocol_id=protocol_id,
+                              group_ids=group_ids)
+        return (user_id, methods, scope_id, expires_at_str, audit_ids,
+                federated_info)
+
+
+class FederatedProjectScopedPayload(FederatedScopedPayload):
+    version = 5
+
+
+class FederatedDomainScopedPayload(FederatedScopedPayload):
+    version = 6
diff --git a/keystone-moon/keystone/token/providers/fernet/utils.py b/keystone-moon/keystone/token/providers/fernet/utils.py
index 56624ee5..4235eda8 100644
--- a/keystone-moon/keystone/token/providers/fernet/utils.py
+++ b/keystone-moon/keystone/token/providers/fernet/utils.py
@@ -59,8 +59,8 @@ def _convert_to_integers(id_value):
     try:
         id_int = int(id_value)
     except ValueError as e:
-        msg = ('Unable to convert Keystone user or group ID. Error: %s', e)
-        LOG.error(msg)
+        msg = _LE('Unable to convert Keystone user or group ID. Error: %s')
+        LOG.error(msg, e)
         raise
 
     return id_int
@@ -174,11 +174,16 @@ def rotate_keys(keystone_user_id=None, keystone_group_id=None):
     for filename in os.listdir(CONF.fernet_tokens.key_repository):
         path = os.path.join(CONF.fernet_tokens.key_repository, str(filename))
         if os.path.isfile(path):
-            key_files[int(filename)] = path
+            try:
+                key_id = int(filename)
+            except ValueError:
+                pass
+            else:
+                key_files[key_id] = path
 
     LOG.info(_LI('Starting key rotation with %(count)s key files: %(list)s'), {
         'count': len(key_files),
-        'list': key_files.values()})
+        'list': list(key_files.values())})
 
     # determine the number of the new primary key
     current_primary_key = max(key_files.keys())
@@ -199,20 +204,24 @@
     # add a new key to the rotation, which will be the *next* primary
     _create_new_key(keystone_user_id, keystone_group_id)
 
+    max_active_keys = CONF.fernet_tokens.max_active_keys
     # check for bad configuration
-    if CONF.fernet_tokens.max_active_keys < 1:
+    if max_active_keys < 1:
         LOG.warning(_LW(
            '[fernet_tokens] max_active_keys must be at least 1 to maintain a '
            'primary key.'))
-        CONF.fernet_tokens.max_active_keys = 1
+        max_active_keys = 1
 
     # purge excess keys
-    keys = sorted(key_files.keys())
-    excess_keys = (
-        keys[:len(key_files) - CONF.fernet_tokens.max_active_keys + 1])
-    LOG.info(_LI('Excess keys to purge: %s'), excess_keys)
-    for i in excess_keys:
-        os.remove(key_files[i])
+
+    # Note that key_files doesn't contain the new active key that was created,
+    # only the old active keys.
+    keys = sorted(key_files.keys(), reverse=True)
+    while len(keys) > (max_active_keys - 1):
+        index_to_purge = keys.pop()
+        key_to_purge = key_files[index_to_purge]
+        LOG.info(_LI('Excess key to purge: %s'), key_to_purge)
+        os.remove(key_to_purge)
 
 
 def load_keys():
@@ -232,12 +241,25 @@
         path = os.path.join(CONF.fernet_tokens.key_repository, str(filename))
         if os.path.isfile(path):
             with open(path, 'r') as key_file:
-                keys[int(filename)] = key_file.read()
-
-    LOG.info(_LI(
-        'Loaded %(count)s encryption keys from: %(dir)s'), {
-            'count': len(keys),
-            'dir': CONF.fernet_tokens.key_repository})
+                try:
+                    key_id = int(filename)
+                except ValueError:
+                    pass
+                else:
+                    keys[key_id] = key_file.read()
+
+    if len(keys) != CONF.fernet_tokens.max_active_keys:
+        # If there haven't been enough key rotations to reach max_active_keys,
+        # or if the configured value of max_active_keys has changed since the
+        # last rotation, then reporting the discrepancy might be useful. Once
+        # the number of keys matches max_active_keys, this log entry is too
+        # repetitive to be useful.
+        LOG.info(_LI(
+            'Loaded %(count)d encryption keys (max_active_keys=%(max)d) from: '
+            '%(dir)s'), {
+                'count': len(keys),
+                'max': CONF.fernet_tokens.max_active_keys,
+                'dir': CONF.fernet_tokens.key_repository})
 
     # return the encryption_keys, sorted by key number, descending
    return [keys[x] for x in sorted(keys.keys(), reverse=True)]
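
The issued_at change in core.py above relies on the fact that the Fernet format itself timestamps every token, which is why the provider reads the creation time back out of the token instead of carrying it in the payload. As a rough standalone illustration (not part of this patch, and using the cryptography library directly rather than Keystone's TokenFormatter; the key and payload below are made up), the embedded creation time can be recovered by base64-decoding the token and unpacking bytes 1-8, which is essentially what TokenFormatter.creation_time() does:

import base64
import datetime
import struct

from cryptography.fernet import Fernet

# Hypothetical key and payload, purely for illustration.
key = Fernet.generate_key()
token = Fernet(key).encrypt(b'example payload')

# Fernet layout: 1-byte version | 8-byte big-endian timestamp | 16-byte IV |
# ciphertext | 32-byte HMAC. The timestamp records when the token was created.
decoded = base64.urlsafe_b64decode(token)
(timestamp,) = struct.unpack('>Q', decoded[1:9])
issued_at = datetime.datetime.utcfromtimestamp(timestamp)
print(issued_at.isoformat())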