From 2e7b4f2027a1147ca28301e4f88adf8274b39a1f Mon Sep 17 00:00:00 2001
From: DUVAL Thomas
Date: Thu, 9 Jun 2016 09:11:50 +0200
Subject: Update Keystone core to Mitaka.

Change-Id: Ia10d6add16f4a9d25d1f42d420661c46332e69db
---
 .../keystone/token/persistence/__init__.py       |   2 +-
 .../keystone/token/persistence/backends/kvs.py   |  23 +++-
 .../token/persistence/backends/memcache.py       |   6 +
 .../token/persistence/backends/memcache_pool.py  |   6 +
 .../keystone/token/persistence/backends/sql.py   | 141 +++++++++++----------
 keystone-moon/keystone/token/persistence/core.py |  25 ++--
 6 files changed, 112 insertions(+), 91 deletions(-)

diff --git a/keystone-moon/keystone/token/persistence/__init__.py b/keystone-moon/keystone/token/persistence/__init__.py
index 89ec875d..9d8e17f2 100644
--- a/keystone-moon/keystone/token/persistence/__init__.py
+++ b/keystone-moon/keystone/token/persistence/__init__.py
@@ -13,4 +13,4 @@
 from keystone.token.persistence.core import *  # noqa


-__all__ = ['Manager', 'Driver']
+__all__ = ('Manager', 'Driver')

diff --git a/keystone-moon/keystone/token/persistence/backends/kvs.py b/keystone-moon/keystone/token/persistence/backends/kvs.py
index 51931586..3620db58 100644
--- a/keystone-moon/keystone/token/persistence/backends/kvs.py
+++ b/keystone-moon/keystone/token/persistence/backends/kvs.py
@@ -55,10 +55,10 @@ class Token(token.persistence.TokenDriverV8):
         if self.__class__ == Token:
             # NOTE(morganfainberg): Only warn if the base KVS implementation
             # is instantiated.
-            LOG.warn(_LW('It is recommended to only use the base '
-                         'key-value-store implementation for the token driver '
-                         "for testing purposes. Please use 'memcache' or "
-                         "'sql' instead."))
+            LOG.warning(_LW('It is recommended to only use the base '
+                            'key-value-store implementation for the token '
+                            'driver for testing purposes. Please use '
+                            "'memcache' or 'sql' instead."))

     def _prefix_token_id(self, token_id):
         return 'token-%s' % token_id.encode('utf-8')
@@ -138,8 +138,10 @@ class Token(token.persistence.TokenDriverV8):
         return data_copy

     def _get_user_token_list_with_expiry(self, user_key):
-        """Return a list of tuples in the format (token_id, token_expiry) for
-        the user_key.
+        """Return user token list with token expiry.
+
+        :return: the tuples in the format (token_id, token_expiry)
+        :rtype: list
         """
         return self._get_key_or_default(user_key, default=[])

@@ -210,6 +212,15 @@ class Token(token.persistence.TokenDriverV8):
                                                       subsecond=True)
         revoked_token_data['id'] = data['id']

+        token_data = data['token_data']
+        if 'access' in token_data:
+            # It's a v2 token.
+            audit_ids = token_data['access']['token']['audit_ids']
+        else:
+            # It's a v3 token.
+            audit_ids = token_data['token']['audit_ids']
+        revoked_token_data['audit_id'] = audit_ids[0]
+
         token_list = self._get_key_or_default(self.revocation_key, default=[])
         if not isinstance(token_list, list):
             # NOTE(morganfainberg): In the case that the revocation list is not

diff --git a/keystone-moon/keystone/token/persistence/backends/memcache.py b/keystone-moon/keystone/token/persistence/backends/memcache.py
index 03f27eaf..e6b0fcab 100644
--- a/keystone-moon/keystone/token/persistence/backends/memcache.py
+++ b/keystone-moon/keystone/token/persistence/backends/memcache.py
@@ -14,6 +14,7 @@
 # under the License.
from oslo_config import cfg +from oslo_log import versionutils from keystone.token.persistence.backends import kvs @@ -25,6 +26,11 @@ class Token(kvs.Token): kvs_backend = 'openstack.kvs.Memcached' memcached_backend = 'memcached' + @versionutils.deprecated( + what='Memcache Token Persistence Driver', + as_of=versionutils.deprecated.MITAKA, + in_favor_of='fernet token driver (no-persistence)', + remove_in=0) def __init__(self, *args, **kwargs): kwargs['memcached_backend'] = self.memcached_backend kwargs['no_expiry_keys'] = [self.revocation_key] diff --git a/keystone-moon/keystone/token/persistence/backends/memcache_pool.py b/keystone-moon/keystone/token/persistence/backends/memcache_pool.py index 55f9e8ae..39a5ca65 100644 --- a/keystone-moon/keystone/token/persistence/backends/memcache_pool.py +++ b/keystone-moon/keystone/token/persistence/backends/memcache_pool.py @@ -11,6 +11,7 @@ # under the License. from oslo_config import cfg +from oslo_log import versionutils from keystone.token.persistence.backends import memcache @@ -21,6 +22,11 @@ CONF = cfg.CONF class Token(memcache.Token): memcached_backend = 'pooled_memcached' + @versionutils.deprecated( + what='Memcache Pool Token Persistence Driver', + as_of=versionutils.deprecated.MITAKA, + in_favor_of='fernet token driver (no-persistence)', + remove_in=0) def __init__(self, *args, **kwargs): for arg in ('dead_retry', 'socket_timeout', 'pool_maxsize', 'pool_unused_timeout', 'pool_connection_get_timeout'): diff --git a/keystone-moon/keystone/token/persistence/backends/sql.py b/keystone-moon/keystone/token/persistence/backends/sql.py index 6fc1d223..4b3439a1 100644 --- a/keystone-moon/keystone/token/persistence/backends/sql.py +++ b/keystone-moon/keystone/token/persistence/backends/sql.py @@ -53,7 +53,6 @@ def _expiry_range_batched(session, upper_bound_func, batch_size): Return the timestamp of the next token that is `batch_size` rows from being the oldest expired token. """ - # This expiry strategy splits the tokens into roughly equal sized batches # to be deleted. It does this by finding the timestamp of a token # `batch_size` rows from the oldest token and yielding that to the caller. 
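A note on the comment block above: `_expiry_range_batched` yields a series of
expiry timestamps, each roughly `batch_size` rows beyond the previous one, so
the caller can issue several bounded DELETEs instead of one huge statement.
The following is only an illustrative sketch of that idea, using a plain
sorted list in place of the real SQLAlchemy query; the names and data shapes
are invented for the example and are not keystone's actual implementation:

    import datetime

    def expiry_range_batched(expiry_times, upper_bound, batch_size):
        # Yield every `batch_size`-th expiry timestamp as a deletion upper
        # bound, then the overall upper bound to sweep up the remainder.
        for i in range(batch_size - 1, len(expiry_times), batch_size):
            yield expiry_times[i]
        yield upper_bound

    now = datetime.datetime(2016, 6, 9, 9, 0, 0)
    expired = [now - datetime.timedelta(minutes=m) for m in range(50, 0, -1)]
    for bound in expiry_range_batched(expired, now, batch_size=10):
        # Each iteration stands in for: DELETE FROM token WHERE expires <= bound
        print('delete tokens with expires <= %s' % bound)
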
@@ -79,7 +78,6 @@ def _expiry_range_batched(session, upper_bound_func, batch_size):

 def _expiry_range_all(session, upper_bound_func):
     """Expires all tokens in one pass."""
-
     yield upper_bound_func()


@@ -88,11 +86,11 @@ class Token(token.persistence.TokenDriverV8):
     def get_token(self, token_id):
         if token_id is None:
             raise exception.TokenNotFound(token_id=token_id)
-        session = sql.get_session()
-        token_ref = session.query(TokenModel).get(token_id)
-        if not token_ref or not token_ref.valid:
-            raise exception.TokenNotFound(token_id=token_id)
-        return token_ref.to_dict()
+        with sql.session_for_read() as session:
+            token_ref = session.query(TokenModel).get(token_id)
+            if not token_ref or not token_ref.valid:
+                raise exception.TokenNotFound(token_id=token_id)
+            return token_ref.to_dict()

     def create_token(self, token_id, data):
         data_copy = copy.deepcopy(data)
@@ -103,14 +101,12 @@ class Token(token.persistence.TokenDriverV8):

         token_ref = TokenModel.from_dict(data_copy)
         token_ref.valid = True
-        session = sql.get_session()
-        with session.begin():
+        with sql.session_for_write() as session:
             session.add(token_ref)

         return token_ref.to_dict()

     def delete_token(self, token_id):
-        session = sql.get_session()
-        with session.begin():
+        with sql.session_for_write() as session:
             token_ref = session.query(TokenModel).get(token_id)
             if not token_ref or not token_ref.valid:
                 raise exception.TokenNotFound(token_id=token_id)
@@ -126,9 +122,8 @@ class Token(token.persistence.TokenDriverV8):
         or the trustor's user ID, so will use trust_id to query the tokens.

         """
-        session = sql.get_session()
         token_list = []
-        with session.begin():
+        with sql.session_for_write() as session:
             now = timeutils.utcnow()
             query = session.query(TokenModel)
             query = query.filter_by(valid=True)
@@ -169,38 +164,37 @@ class Token(token.persistence.TokenDriverV8):
         return False

     def _list_tokens_for_trust(self, trust_id):
-        session = sql.get_session()
-        tokens = []
-        now = timeutils.utcnow()
-        query = session.query(TokenModel)
-        query = query.filter(TokenModel.expires > now)
-        query = query.filter(TokenModel.trust_id == trust_id)
-
-        token_references = query.filter_by(valid=True)
-        for token_ref in token_references:
-            token_ref_dict = token_ref.to_dict()
-            tokens.append(token_ref_dict['id'])
-        return tokens
+        with sql.session_for_read() as session:
+            tokens = []
+            now = timeutils.utcnow()
+            query = session.query(TokenModel)
+            query = query.filter(TokenModel.expires > now)
+            query = query.filter(TokenModel.trust_id == trust_id)
+
+            token_references = query.filter_by(valid=True)
+            for token_ref in token_references:
+                token_ref_dict = token_ref.to_dict()
+                tokens.append(token_ref_dict['id'])
+            return tokens

     def _list_tokens_for_user(self, user_id, tenant_id=None):
-        session = sql.get_session()
-        tokens = []
-        now = timeutils.utcnow()
-        query = session.query(TokenModel)
-        query = query.filter(TokenModel.expires > now)
-        query = query.filter(TokenModel.user_id == user_id)
-
-        token_references = query.filter_by(valid=True)
-        for token_ref in token_references:
-            token_ref_dict = token_ref.to_dict()
-            if self._tenant_matches(tenant_id, token_ref_dict):
-                tokens.append(token_ref['id'])
-        return tokens
+        with sql.session_for_read() as session:
+            tokens = []
+            now = timeutils.utcnow()
+            query = session.query(TokenModel)
+            query = query.filter(TokenModel.expires > now)
+            query = query.filter(TokenModel.user_id == user_id)
+
+            token_references = query.filter_by(valid=True)
+            for token_ref in token_references:
+                token_ref_dict = token_ref.to_dict()
+                if self._tenant_matches(tenant_id, token_ref_dict):
+                    tokens.append(token_ref['id'])
+            return tokens

     def _list_tokens_for_consumer(self, user_id, consumer_id):
         tokens = []
-        session = sql.get_session()
-        with session.begin():
+        with sql.session_for_write() as session:
             now = timeutils.utcnow()
             query = session.query(TokenModel)
             query = query.filter(TokenModel.expires > now)
@@ -225,19 +219,29 @@ class Token(token.persistence.TokenDriverV8):
         return self._list_tokens_for_user(user_id, tenant_id)

     def list_revoked_tokens(self):
-        session = sql.get_session()
-        tokens = []
-        now = timeutils.utcnow()
-        query = session.query(TokenModel.id, TokenModel.expires)
-        query = query.filter(TokenModel.expires > now)
-        token_references = query.filter_by(valid=False)
-        for token_ref in token_references:
-            record = {
-                'id': token_ref[0],
-                'expires': token_ref[1],
-            }
-            tokens.append(record)
-        return tokens
+        with sql.session_for_read() as session:
+            tokens = []
+            now = timeutils.utcnow()
+            query = session.query(TokenModel.id, TokenModel.expires,
+                                  TokenModel.extra)
+            query = query.filter(TokenModel.expires > now)
+            token_references = query.filter_by(valid=False)
+            for token_ref in token_references:
+                token_data = token_ref[2]['token_data']
+                if 'access' in token_data:
+                    # It's a v2 token.
+                    audit_ids = token_data['access']['token']['audit_ids']
+                else:
+                    # It's a v3 token.
+                    audit_ids = token_data['token']['audit_ids']
+
+                record = {
+                    'id': token_ref[0],
+                    'expires': token_ref[1],
+                    'audit_id': audit_ids[0],
+                }
+                tokens.append(record)
+            return tokens

     def _expiry_range_strategy(self, dialect):
         """Choose a token range expiration strategy
@@ -245,7 +249,6 @@ class Token(token.persistence.TokenDriverV8):
         Based on the DB dialect, select an expiry range callable that is
         appropriate.
         """
-
         # DB2 and MySQL can both benefit from a batched strategy. On DB2 the
         # transaction log can fill up and on MySQL w/Galera, large
         # transactions can exceed the maximum write set size.
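The sql.py hunks above all apply the same mechanical change: a module-level
`sql.get_session()` call plus an explicit `session.begin()` is replaced by the
`sql.session_for_read()` / `sql.session_for_write()` context managers, and the
query code moves inside the `with` block so the transaction scope is explicit
and writes are committed (or rolled back) when the block exits. A rough,
self-contained sketch of that pattern follows; it is built on sqlite3 purely
for illustration and is not keystone's actual implementation, which wraps
oslo.db's enginefacade:

    import contextlib
    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE token (id TEXT PRIMARY KEY, valid INTEGER)')

    @contextlib.contextmanager
    def session_for_write():
        # Commit when the block succeeds, roll back if it raises.
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise

    @contextlib.contextmanager
    def session_for_read():
        # Reads share the same connection but never commit.
        yield conn

    with session_for_write() as session:
        session.execute("INSERT INTO token VALUES ('abc123', 1)")

    with session_for_read() as session:
        print(session.execute('SELECT id, valid FROM token').fetchall())
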
@@ -266,18 +269,18 @@ class Token(token.persistence.TokenDriverV8): return _expiry_range_all def flush_expired_tokens(self): - session = sql.get_session() - dialect = session.bind.dialect.name - expiry_range_func = self._expiry_range_strategy(dialect) - query = session.query(TokenModel.expires) - total_removed = 0 - upper_bound_func = timeutils.utcnow - for expiry_time in expiry_range_func(session, upper_bound_func): - delete_query = query.filter(TokenModel.expires <= - expiry_time) - row_count = delete_query.delete(synchronize_session=False) - total_removed += row_count - LOG.debug('Removed %d total expired tokens', total_removed) - - session.flush() - LOG.info(_LI('Total expired tokens removed: %d'), total_removed) + with sql.session_for_write() as session: + dialect = session.bind.dialect.name + expiry_range_func = self._expiry_range_strategy(dialect) + query = session.query(TokenModel.expires) + total_removed = 0 + upper_bound_func = timeutils.utcnow + for expiry_time in expiry_range_func(session, upper_bound_func): + delete_query = query.filter(TokenModel.expires <= + expiry_time) + row_count = delete_query.delete(synchronize_session=False) + total_removed += row_count + LOG.debug('Removed %d total expired tokens', total_removed) + + session.flush() + LOG.info(_LI('Total expired tokens removed: %d'), total_removed) diff --git a/keystone-moon/keystone/token/persistence/core.py b/keystone-moon/keystone/token/persistence/core.py index e68970ac..76c3ff70 100644 --- a/keystone-moon/keystone/token/persistence/core.py +++ b/keystone-moon/keystone/token/persistence/core.py @@ -32,9 +32,9 @@ from keystone.token import utils CONF = cfg.CONF LOG = log.getLogger(__name__) -MEMOIZE = cache.get_memoization_decorator(section='token') -REVOCATION_MEMOIZE = cache.get_memoization_decorator( - section='token', expiration_section='revoke') +MEMOIZE = cache.get_memoization_decorator(group='token') +REVOCATION_MEMOIZE = cache.get_memoization_decorator(group='token', + expiration_group='revoke') @dependency.requires('assignment_api', 'identity_api', 'resource_api', @@ -60,11 +60,6 @@ class PersistenceManager(manager.Manager): raise exception.TokenNotFound(token_id=token_id) def get_token(self, token_id): - if not token_id: - # NOTE(morganfainberg): There are cases when the - # context['token_id'] will in-fact be None. This also saves - # a round-trip to the backend if we don't have a token_id. - raise exception.TokenNotFound(token_id='') unique_id = utils.generate_unique_id(token_id) token_ref = self._get_token(unique_id) # NOTE(morganfainberg): Lift expired checking to the manager, there is @@ -206,13 +201,13 @@ class Manager(object): This class is a proxy class to the token_provider_api's persistence manager. """ + def __init__(self): # NOTE(morganfainberg): __init__ is required for dependency processing. super(Manager, self).__init__() def __getattr__(self, item): """Forward calls to the `token_provider_api` persistence manager.""" - # NOTE(morganfainberg): Prevent infinite recursion, raise an # AttributeError for 'token_provider_api' ensuring that the dep # injection doesn't infinitely try and lookup self.token_provider_api @@ -240,7 +235,7 @@ class TokenDriverV8(object): :param token_id: identity of the token :type token_id: string :returns: token_ref - :raises: keystone.exception.TokenNotFound + :raises keystone.exception.TokenNotFound: If the token doesn't exist. 
""" raise exception.NotImplemented() # pragma: no cover @@ -276,7 +271,7 @@ class TokenDriverV8(object): :param token_id: identity of the token :type token_id: string :returns: None. - :raises: keystone.exception.TokenNotFound + :raises keystone.exception.TokenNotFound: If the token doesn't exist. """ raise exception.NotImplemented() # pragma: no cover @@ -304,7 +299,7 @@ class TokenDriverV8(object): :param consumer_id: identity of the consumer :type consumer_id: string :returns: The tokens that have been deleted. - :raises: keystone.exception.TokenNotFound + :raises keystone.exception.TokenNotFound: If the token doesn't exist. """ if not CONF.token.revoke_by_id: @@ -317,7 +312,8 @@ class TokenDriverV8(object): for token in token_list: try: self.delete_token(token) - except exception.NotFound: + except exception.NotFound: # nosec + # The token is already gone, good. pass return token_list @@ -354,8 +350,7 @@ class TokenDriverV8(object): @abc.abstractmethod def flush_expired_tokens(self): - """Archive or delete tokens that have expired. - """ + """Archive or delete tokens that have expired.""" raise exception.NotImplemented() # pragma: no cover -- cgit 1.2.3-korg