author     DUVAL Thomas <thomas.duval@orange.com>   2016-06-09 09:11:50 +0200
committer  DUVAL Thomas <thomas.duval@orange.com>   2016-06-09 09:11:50 +0200
commit     2e7b4f2027a1147ca28301e4f88adf8274b39a1f (patch)
tree       8b8d94001ebe6cc34106cf813b538911a8d66d9a /keystone-moon/keystone/token
parent     a33bdcb627102a01244630a54cb4b5066b385a6a (diff)
Update Keystone core to Mitaka.
Change-Id: Ia10d6add16f4a9d25d1f42d420661c46332e69db
Diffstat (limited to 'keystone-moon/keystone/token')
-rw-r--r--   keystone-moon/keystone/token/__init__.py                               1
-rw-r--r--   keystone-moon/keystone/token/_simple_cert.py                          91
-rw-r--r--   keystone-moon/keystone/token/controllers.py                           22
-rw-r--r--   keystone-moon/keystone/token/persistence/__init__.py                   2
-rw-r--r--   keystone-moon/keystone/token/persistence/backends/kvs.py              23
-rw-r--r--   keystone-moon/keystone/token/persistence/backends/memcache.py          6
-rw-r--r--   keystone-moon/keystone/token/persistence/backends/memcache_pool.py     6
-rw-r--r--   keystone-moon/keystone/token/persistence/backends/sql.py             141
-rw-r--r--   keystone-moon/keystone/token/persistence/core.py                      25
-rw-r--r--   keystone-moon/keystone/token/provider.py                             128
-rw-r--r--   keystone-moon/keystone/token/providers/common.py                     248
-rw-r--r--   keystone-moon/keystone/token/providers/fernet/core.py                239
-rw-r--r--   keystone-moon/keystone/token/providers/fernet/token_formatters.py    543
-rw-r--r--   keystone-moon/keystone/token/providers/fernet/utils.py                41
-rw-r--r--   keystone-moon/keystone/token/providers/pki.py                          5
-rw-r--r--   keystone-moon/keystone/token/providers/pkiz.py                         5
16 files changed, 833 insertions, 693 deletions
diff --git a/keystone-moon/keystone/token/__init__.py b/keystone-moon/keystone/token/__init__.py
index a73e19f9..f85ffc79 100644
--- a/keystone-moon/keystone/token/__init__.py
+++ b/keystone-moon/keystone/token/__init__.py
@@ -15,4 +15,3 @@
from keystone.token import controllers # noqa
from keystone.token import persistence # noqa
from keystone.token import provider # noqa
-from keystone.token import routers # noqa
diff --git a/keystone-moon/keystone/token/_simple_cert.py b/keystone-moon/keystone/token/_simple_cert.py
new file mode 100644
index 00000000..9c369255
--- /dev/null
+++ b/keystone-moon/keystone/token/_simple_cert.py
@@ -0,0 +1,91 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# TODO(morganfainberg): Remove this file and extension in the "O" release as
+# it is only used in support of the PKI/PKIz token providers.
+import functools
+
+from oslo_config import cfg
+import webob
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone.common import extension
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone import exception
+
+
+CONF = cfg.CONF
+EXTENSION_DATA = {
+ 'name': 'OpenStack Simple Certificate API',
+ 'namespace': 'http://docs.openstack.org/identity/api/ext/'
+ 'OS-SIMPLE-CERT/v1.0',
+ 'alias': 'OS-SIMPLE-CERT',
+ 'updated': '2014-01-20T12:00:0-00:00',
+ 'description': 'OpenStack simple certificate retrieval extension',
+ 'links': [
+ {
+ 'rel': 'describedby',
+ 'type': 'text/html',
+ 'href': 'http://developer.openstack.org/'
+ 'api-ref-identity-v2-ext.html',
+ }
+ ]}
+extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
+extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
+
+build_resource_relation = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-SIMPLE-CERT', extension_version='1.0')
+
+
+class Routers(wsgi.RoutersBase):
+
+ def _construct_url(self, suffix):
+ return "/OS-SIMPLE-CERT/%s" % suffix
+
+ def append_v3_routers(self, mapper, routers):
+ controller = SimpleCert()
+
+ self._add_resource(
+ mapper, controller,
+ path=self._construct_url('ca'),
+ get_action='get_ca_certificate',
+ rel=build_resource_relation(resource_name='ca_certificate'))
+ self._add_resource(
+ mapper, controller,
+ path=self._construct_url('certificates'),
+ get_action='list_certificates',
+ rel=build_resource_relation(resource_name='certificates'))
+
+
+@dependency.requires('token_provider_api')
+class SimpleCert(controller.V3Controller):
+
+ def _get_certificate(self, name):
+ try:
+ with open(name, 'r') as f:
+ body = f.read()
+ except IOError:
+ raise exception.CertificateFilesUnavailable()
+
+ # NOTE(jamielennox): We construct the webob Response ourselves here so
+ # that we don't pass through the JSON encoding process.
+ headers = [('Content-Type', 'application/x-pem-file')]
+ return webob.Response(body=body, headerlist=headers, status="200 OK")
+
+ def get_ca_certificate(self, context):
+ return self._get_certificate(CONF.signing.ca_certs)
+
+ def list_certificates(self, context):
+ return self._get_certificate(CONF.signing.certfile)
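The new _simple_cert.py above folds the former OS-SIMPLE-CERT extension into the token package: it registers GET routes for /OS-SIMPLE-CERT/ca and /OS-SIMPLE-CERT/certificates and returns the PEM files verbatim with a Content-Type of application/x-pem-file. As a rough illustration only (the endpoint URL below is a placeholder, not part of this change), a client could fetch them like this:

    import requests

    KEYSTONE = 'http://keystone.example.com:5000/v3'  # placeholder endpoint

    def fetch_ca_certificate():
        # The controller returns the raw PEM body, so no JSON decoding is needed.
        resp = requests.get(KEYSTONE + '/OS-SIMPLE-CERT/ca')
        resp.raise_for_status()
        return resp.text

    def fetch_signing_certificates():
        resp = requests.get(KEYSTONE + '/OS-SIMPLE-CERT/certificates')
        resp.raise_for_status()
        return resp.text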
diff --git a/keystone-moon/keystone/token/controllers.py b/keystone-moon/keystone/token/controllers.py
index ff65e733..6eeb23ec 100644
--- a/keystone-moon/keystone/token/controllers.py
+++ b/keystone-moon/keystone/token/controllers.py
@@ -38,6 +38,7 @@ LOG = log.getLogger(__name__)
class ExternalAuthNotApplicable(Exception):
"""External authentication is not applicable."""
+
pass
@@ -48,19 +49,17 @@ class Auth(controller.V2Controller):
@controller.v2_deprecated
def ca_cert(self, context, auth=None):
- ca_file = open(CONF.signing.ca_certs, 'r')
- data = ca_file.read()
- ca_file.close()
+ with open(CONF.signing.ca_certs, 'r') as ca_file:
+ data = ca_file.read()
return data
@controller.v2_deprecated
def signing_cert(self, context, auth=None):
- cert_file = open(CONF.signing.certfile, 'r')
- data = cert_file.read()
- cert_file.close()
+ with open(CONF.signing.certfile, 'r') as cert_file:
+ data = cert_file.read()
return data
- @controller.v2_deprecated
+ @controller.v2_auth_deprecated
def authenticate(self, context, auth=None):
"""Authenticate credentials and return a token.
@@ -82,7 +81,6 @@ class Auth(controller.V2Controller):
Alternatively, this call accepts auth with only a token and tenant
that will return a token that is scoped to that tenant.
"""
-
if auth is None:
raise exception.ValidationError(attribute='auth',
target='request body')
@@ -182,7 +180,8 @@ class Auth(controller.V2Controller):
try:
token_model_ref = token_model.KeystoneToken(
token_id=old_token,
- token_data=self.token_provider_api.validate_token(old_token))
+ token_data=self.token_provider_api.validate_v2_token(old_token)
+ )
except exception.NotFound as e:
raise exception.Unauthorized(e)
@@ -369,6 +368,10 @@ class Auth(controller.V2Controller):
size=CONF.max_param_size)
if tenant_name:
+ if (CONF.resource.project_name_url_safe == 'strict' and
+ utils.is_not_url_safe(tenant_name)):
+ msg = _('Tenant name cannot contain reserved characters.')
+ raise exception.Unauthorized(message=msg)
try:
tenant_ref = self.resource_api.get_project_by_name(
tenant_name, CONF.identity.default_domain_id)
@@ -379,7 +382,6 @@ class Auth(controller.V2Controller):
def _get_project_roles_and_ref(self, user_id, tenant_id):
"""Returns the project roles for this user, and the project ref."""
-
tenant_ref = None
role_list = []
if tenant_id:
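The tenant_name guard added above relies on keystone's utils.is_not_url_safe() helper, which is not part of this diff. A stand-in sketch that captures the intent, treating any character that would be percent-encoded in a URL path segment as reserved (illustrative only, not keystone's implementation):

    from six.moves.urllib import parse

    def is_not_url_safe(name):
        # True when the name contains characters that would need escaping in a
        # URL, e.g. '/', '?', '#', or whitespace.
        return name != parse.quote(name, safe='')

    assert not is_not_url_safe('demo')
    assert is_not_url_safe('demo/child')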
diff --git a/keystone-moon/keystone/token/persistence/__init__.py b/keystone-moon/keystone/token/persistence/__init__.py
index 89ec875d..9d8e17f2 100644
--- a/keystone-moon/keystone/token/persistence/__init__.py
+++ b/keystone-moon/keystone/token/persistence/__init__.py
@@ -13,4 +13,4 @@
from keystone.token.persistence.core import * # noqa
-__all__ = ['Manager', 'Driver']
+__all__ = ('Manager', 'Driver')
diff --git a/keystone-moon/keystone/token/persistence/backends/kvs.py b/keystone-moon/keystone/token/persistence/backends/kvs.py
index 51931586..3620db58 100644
--- a/keystone-moon/keystone/token/persistence/backends/kvs.py
+++ b/keystone-moon/keystone/token/persistence/backends/kvs.py
@@ -55,10 +55,10 @@ class Token(token.persistence.TokenDriverV8):
if self.__class__ == Token:
# NOTE(morganfainberg): Only warn if the base KVS implementation
# is instantiated.
- LOG.warn(_LW('It is recommended to only use the base '
- 'key-value-store implementation for the token driver '
- "for testing purposes. Please use 'memcache' or "
- "'sql' instead."))
+ LOG.warning(_LW('It is recommended to only use the base '
+ 'key-value-store implementation for the token '
+ 'driver for testing purposes. Please use '
+ "'memcache' or 'sql' instead."))
def _prefix_token_id(self, token_id):
return 'token-%s' % token_id.encode('utf-8')
@@ -138,8 +138,10 @@ class Token(token.persistence.TokenDriverV8):
return data_copy
def _get_user_token_list_with_expiry(self, user_key):
- """Return a list of tuples in the format (token_id, token_expiry) for
- the user_key.
+ """Return user token list with token expiry.
+
+ :return: the tuples in the format (token_id, token_expiry)
+ :rtype: list
"""
return self._get_key_or_default(user_key, default=[])
@@ -210,6 +212,15 @@ class Token(token.persistence.TokenDriverV8):
subsecond=True)
revoked_token_data['id'] = data['id']
+ token_data = data['token_data']
+ if 'access' in token_data:
+ # It's a v2 token.
+ audit_ids = token_data['access']['token']['audit_ids']
+ else:
+ # It's a v3 token.
+ audit_ids = token_data['token']['audit_ids']
+ revoked_token_data['audit_id'] = audit_ids[0]
+
token_list = self._get_key_or_default(self.revocation_key, default=[])
if not isinstance(token_list, list):
# NOTE(morganfainberg): In the case that the revocation list is not
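The revocation-record change above pulls the first audit ID out of whichever payload shape is stored: v2.0 token data is nested under 'access', v3 data under 'token'. The same branch as a standalone helper, over minimal illustrative dictionaries:

    def extract_first_audit_id(token_data):
        # Return the first audit ID from either a v2.0 or a v3 token payload.
        if 'access' in token_data:
            # v2.0 token: {'access': {'token': {'audit_ids': [...]}}}
            audit_ids = token_data['access']['token']['audit_ids']
        else:
            # v3 token: {'token': {'audit_ids': [...]}}
            audit_ids = token_data['token']['audit_ids']
        return audit_ids[0]

    assert extract_first_audit_id(
        {'token': {'audit_ids': ['VcxU2JYqT8OzfUVvrjEITQ']}}) == 'VcxU2JYqT8OzfUVvrjEITQ'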
diff --git a/keystone-moon/keystone/token/persistence/backends/memcache.py b/keystone-moon/keystone/token/persistence/backends/memcache.py
index 03f27eaf..e6b0fcab 100644
--- a/keystone-moon/keystone/token/persistence/backends/memcache.py
+++ b/keystone-moon/keystone/token/persistence/backends/memcache.py
@@ -14,6 +14,7 @@
# under the License.
from oslo_config import cfg
+from oslo_log import versionutils
from keystone.token.persistence.backends import kvs
@@ -25,6 +26,11 @@ class Token(kvs.Token):
kvs_backend = 'openstack.kvs.Memcached'
memcached_backend = 'memcached'
+ @versionutils.deprecated(
+ what='Memcache Token Persistence Driver',
+ as_of=versionutils.deprecated.MITAKA,
+ in_favor_of='fernet token driver (no-persistence)',
+ remove_in=0)
def __init__(self, *args, **kwargs):
kwargs['memcached_backend'] = self.memcached_backend
kwargs['no_expiry_keys'] = [self.revocation_key]
diff --git a/keystone-moon/keystone/token/persistence/backends/memcache_pool.py b/keystone-moon/keystone/token/persistence/backends/memcache_pool.py
index 55f9e8ae..39a5ca65 100644
--- a/keystone-moon/keystone/token/persistence/backends/memcache_pool.py
+++ b/keystone-moon/keystone/token/persistence/backends/memcache_pool.py
@@ -11,6 +11,7 @@
# under the License.
from oslo_config import cfg
+from oslo_log import versionutils
from keystone.token.persistence.backends import memcache
@@ -21,6 +22,11 @@ CONF = cfg.CONF
class Token(memcache.Token):
memcached_backend = 'pooled_memcached'
+ @versionutils.deprecated(
+ what='Memcache Pool Token Persistence Driver',
+ as_of=versionutils.deprecated.MITAKA,
+ in_favor_of='fernet token driver (no-persistence)',
+ remove_in=0)
def __init__(self, *args, **kwargs):
for arg in ('dead_retry', 'socket_timeout', 'pool_maxsize',
'pool_unused_timeout', 'pool_connection_get_timeout'):
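Both memcache-backed drivers are now marked deprecated through oslo_log's versionutils.deprecated decorator, which logs a warning when the decorated constructor runs; remove_in=0 means no removal release is announced yet. A minimal sketch of the same pattern on a throwaway class (the class itself is hypothetical):

    from oslo_log import versionutils

    class ExampleTokenDriver(object):

        @versionutils.deprecated(
            what='Example Token Persistence Driver',
            as_of=versionutils.deprecated.MITAKA,
            in_favor_of='fernet token driver (no-persistence)',
            remove_in=0)
        def __init__(self):
            super(ExampleTokenDriver, self).__init__()

    ExampleTokenDriver()  # emits the deprecation warning via oslo.log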
diff --git a/keystone-moon/keystone/token/persistence/backends/sql.py b/keystone-moon/keystone/token/persistence/backends/sql.py
index 6fc1d223..4b3439a1 100644
--- a/keystone-moon/keystone/token/persistence/backends/sql.py
+++ b/keystone-moon/keystone/token/persistence/backends/sql.py
@@ -53,7 +53,6 @@ def _expiry_range_batched(session, upper_bound_func, batch_size):
Return the timestamp of the next token that is `batch_size` rows from
being the oldest expired token.
"""
-
# This expiry strategy splits the tokens into roughly equal sized batches
# to be deleted. It does this by finding the timestamp of a token
# `batch_size` rows from the oldest token and yielding that to the caller.
@@ -79,7 +78,6 @@ def _expiry_range_batched(session, upper_bound_func, batch_size):
def _expiry_range_all(session, upper_bound_func):
"""Expires all tokens in one pass."""
-
yield upper_bound_func()
@@ -88,11 +86,11 @@ class Token(token.persistence.TokenDriverV8):
def get_token(self, token_id):
if token_id is None:
raise exception.TokenNotFound(token_id=token_id)
- session = sql.get_session()
- token_ref = session.query(TokenModel).get(token_id)
- if not token_ref or not token_ref.valid:
- raise exception.TokenNotFound(token_id=token_id)
- return token_ref.to_dict()
+ with sql.session_for_read() as session:
+ token_ref = session.query(TokenModel).get(token_id)
+ if not token_ref or not token_ref.valid:
+ raise exception.TokenNotFound(token_id=token_id)
+ return token_ref.to_dict()
def create_token(self, token_id, data):
data_copy = copy.deepcopy(data)
@@ -103,14 +101,12 @@ class Token(token.persistence.TokenDriverV8):
token_ref = TokenModel.from_dict(data_copy)
token_ref.valid = True
- session = sql.get_session()
- with session.begin():
+ with sql.session_for_write() as session:
session.add(token_ref)
return token_ref.to_dict()
def delete_token(self, token_id):
- session = sql.get_session()
- with session.begin():
+ with sql.session_for_write() as session:
token_ref = session.query(TokenModel).get(token_id)
if not token_ref or not token_ref.valid:
raise exception.TokenNotFound(token_id=token_id)
@@ -126,9 +122,8 @@ class Token(token.persistence.TokenDriverV8):
or the trustor's user ID, so will use trust_id to query the tokens.
"""
- session = sql.get_session()
token_list = []
- with session.begin():
+ with sql.session_for_write() as session:
now = timeutils.utcnow()
query = session.query(TokenModel)
query = query.filter_by(valid=True)
@@ -169,38 +164,37 @@ class Token(token.persistence.TokenDriverV8):
return False
def _list_tokens_for_trust(self, trust_id):
- session = sql.get_session()
- tokens = []
- now = timeutils.utcnow()
- query = session.query(TokenModel)
- query = query.filter(TokenModel.expires > now)
- query = query.filter(TokenModel.trust_id == trust_id)
-
- token_references = query.filter_by(valid=True)
- for token_ref in token_references:
- token_ref_dict = token_ref.to_dict()
- tokens.append(token_ref_dict['id'])
- return tokens
+ with sql.session_for_read() as session:
+ tokens = []
+ now = timeutils.utcnow()
+ query = session.query(TokenModel)
+ query = query.filter(TokenModel.expires > now)
+ query = query.filter(TokenModel.trust_id == trust_id)
+
+ token_references = query.filter_by(valid=True)
+ for token_ref in token_references:
+ token_ref_dict = token_ref.to_dict()
+ tokens.append(token_ref_dict['id'])
+ return tokens
def _list_tokens_for_user(self, user_id, tenant_id=None):
- session = sql.get_session()
- tokens = []
- now = timeutils.utcnow()
- query = session.query(TokenModel)
- query = query.filter(TokenModel.expires > now)
- query = query.filter(TokenModel.user_id == user_id)
-
- token_references = query.filter_by(valid=True)
- for token_ref in token_references:
- token_ref_dict = token_ref.to_dict()
- if self._tenant_matches(tenant_id, token_ref_dict):
- tokens.append(token_ref['id'])
- return tokens
+ with sql.session_for_read() as session:
+ tokens = []
+ now = timeutils.utcnow()
+ query = session.query(TokenModel)
+ query = query.filter(TokenModel.expires > now)
+ query = query.filter(TokenModel.user_id == user_id)
+
+ token_references = query.filter_by(valid=True)
+ for token_ref in token_references:
+ token_ref_dict = token_ref.to_dict()
+ if self._tenant_matches(tenant_id, token_ref_dict):
+ tokens.append(token_ref['id'])
+ return tokens
def _list_tokens_for_consumer(self, user_id, consumer_id):
tokens = []
- session = sql.get_session()
- with session.begin():
+ with sql.session_for_write() as session:
now = timeutils.utcnow()
query = session.query(TokenModel)
query = query.filter(TokenModel.expires > now)
@@ -225,19 +219,29 @@ class Token(token.persistence.TokenDriverV8):
return self._list_tokens_for_user(user_id, tenant_id)
def list_revoked_tokens(self):
- session = sql.get_session()
- tokens = []
- now = timeutils.utcnow()
- query = session.query(TokenModel.id, TokenModel.expires)
- query = query.filter(TokenModel.expires > now)
- token_references = query.filter_by(valid=False)
- for token_ref in token_references:
- record = {
- 'id': token_ref[0],
- 'expires': token_ref[1],
- }
- tokens.append(record)
- return tokens
+ with sql.session_for_read() as session:
+ tokens = []
+ now = timeutils.utcnow()
+ query = session.query(TokenModel.id, TokenModel.expires,
+ TokenModel.extra)
+ query = query.filter(TokenModel.expires > now)
+ token_references = query.filter_by(valid=False)
+ for token_ref in token_references:
+ token_data = token_ref[2]['token_data']
+ if 'access' in token_data:
+ # It's a v2 token.
+ audit_ids = token_data['access']['token']['audit_ids']
+ else:
+ # It's a v3 token.
+ audit_ids = token_data['token']['audit_ids']
+
+ record = {
+ 'id': token_ref[0],
+ 'expires': token_ref[1],
+ 'audit_id': audit_ids[0],
+ }
+ tokens.append(record)
+ return tokens
def _expiry_range_strategy(self, dialect):
"""Choose a token range expiration strategy
@@ -245,7 +249,6 @@ class Token(token.persistence.TokenDriverV8):
Based on the DB dialect, select an expiry range callable that is
appropriate.
"""
-
# DB2 and MySQL can both benefit from a batched strategy. On DB2 the
# transaction log can fill up and on MySQL w/Galera, large
# transactions can exceed the maximum write set size.
@@ -266,18 +269,18 @@ class Token(token.persistence.TokenDriverV8):
return _expiry_range_all
def flush_expired_tokens(self):
- session = sql.get_session()
- dialect = session.bind.dialect.name
- expiry_range_func = self._expiry_range_strategy(dialect)
- query = session.query(TokenModel.expires)
- total_removed = 0
- upper_bound_func = timeutils.utcnow
- for expiry_time in expiry_range_func(session, upper_bound_func):
- delete_query = query.filter(TokenModel.expires <=
- expiry_time)
- row_count = delete_query.delete(synchronize_session=False)
- total_removed += row_count
- LOG.debug('Removed %d total expired tokens', total_removed)
-
- session.flush()
- LOG.info(_LI('Total expired tokens removed: %d'), total_removed)
+ with sql.session_for_write() as session:
+ dialect = session.bind.dialect.name
+ expiry_range_func = self._expiry_range_strategy(dialect)
+ query = session.query(TokenModel.expires)
+ total_removed = 0
+ upper_bound_func = timeutils.utcnow
+ for expiry_time in expiry_range_func(session, upper_bound_func):
+ delete_query = query.filter(TokenModel.expires <=
+ expiry_time)
+ row_count = delete_query.delete(synchronize_session=False)
+ total_removed += row_count
+ LOG.debug('Removed %d total expired tokens', total_removed)
+
+ session.flush()
+ LOG.info(_LI('Total expired tokens removed: %d'), total_removed)
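Throughout sql.py the legacy sql.get_session() calls are replaced with the enginefacade-style context managers sql.session_for_read() and sql.session_for_write(), so each block gets a session whose transaction is committed on success and rolled back on error. A rough standalone sketch of the write-side pattern built on plain SQLAlchemy and contextlib; the name mirrors keystone's helper, but the body is illustrative rather than keystone's implementation:

    import contextlib

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine('sqlite://')
    Session = sessionmaker(bind=engine)

    @contextlib.contextmanager
    def session_for_write():
        # Yield a session, commit on success, roll back on error, always close.
        session = Session()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()

    # Usage then mirrors the new driver code:
    #     with session_for_write() as session:
    #         session.add(token_ref)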
diff --git a/keystone-moon/keystone/token/persistence/core.py b/keystone-moon/keystone/token/persistence/core.py
index e68970ac..76c3ff70 100644
--- a/keystone-moon/keystone/token/persistence/core.py
+++ b/keystone-moon/keystone/token/persistence/core.py
@@ -32,9 +32,9 @@ from keystone.token import utils
CONF = cfg.CONF
LOG = log.getLogger(__name__)
-MEMOIZE = cache.get_memoization_decorator(section='token')
-REVOCATION_MEMOIZE = cache.get_memoization_decorator(
- section='token', expiration_section='revoke')
+MEMOIZE = cache.get_memoization_decorator(group='token')
+REVOCATION_MEMOIZE = cache.get_memoization_decorator(group='token',
+ expiration_group='revoke')
@dependency.requires('assignment_api', 'identity_api', 'resource_api',
@@ -60,11 +60,6 @@ class PersistenceManager(manager.Manager):
raise exception.TokenNotFound(token_id=token_id)
def get_token(self, token_id):
- if not token_id:
- # NOTE(morganfainberg): There are cases when the
- # context['token_id'] will in-fact be None. This also saves
- # a round-trip to the backend if we don't have a token_id.
- raise exception.TokenNotFound(token_id='')
unique_id = utils.generate_unique_id(token_id)
token_ref = self._get_token(unique_id)
# NOTE(morganfainberg): Lift expired checking to the manager, there is
@@ -206,13 +201,13 @@ class Manager(object):
This class is a proxy class to the token_provider_api's persistence
manager.
"""
+
def __init__(self):
# NOTE(morganfainberg): __init__ is required for dependency processing.
super(Manager, self).__init__()
def __getattr__(self, item):
"""Forward calls to the `token_provider_api` persistence manager."""
-
# NOTE(morganfainberg): Prevent infinite recursion, raise an
# AttributeError for 'token_provider_api' ensuring that the dep
# injection doesn't infinitely try and lookup self.token_provider_api
@@ -240,7 +235,7 @@ class TokenDriverV8(object):
:param token_id: identity of the token
:type token_id: string
:returns: token_ref
- :raises: keystone.exception.TokenNotFound
+ :raises keystone.exception.TokenNotFound: If the token doesn't exist.
"""
raise exception.NotImplemented() # pragma: no cover
@@ -276,7 +271,7 @@ class TokenDriverV8(object):
:param token_id: identity of the token
:type token_id: string
:returns: None.
- :raises: keystone.exception.TokenNotFound
+ :raises keystone.exception.TokenNotFound: If the token doesn't exist.
"""
raise exception.NotImplemented() # pragma: no cover
@@ -304,7 +299,7 @@ class TokenDriverV8(object):
:param consumer_id: identity of the consumer
:type consumer_id: string
:returns: The tokens that have been deleted.
- :raises: keystone.exception.TokenNotFound
+ :raises keystone.exception.TokenNotFound: If the token doesn't exist.
"""
if not CONF.token.revoke_by_id:
@@ -317,7 +312,8 @@ class TokenDriverV8(object):
for token in token_list:
try:
self.delete_token(token)
- except exception.NotFound:
+ except exception.NotFound: # nosec
+ # The token is already gone, good.
pass
return token_list
@@ -354,8 +350,7 @@ class TokenDriverV8(object):
@abc.abstractmethod
def flush_expired_tokens(self):
- """Archive or delete tokens that have expired.
- """
+ """Archive or delete tokens that have expired."""
raise exception.NotImplemented() # pragma: no cover
diff --git a/keystone-moon/keystone/token/provider.py b/keystone-moon/keystone/token/provider.py
index 1422e41f..7c4166f4 100644
--- a/keystone-moon/keystone/token/provider.py
+++ b/keystone-moon/keystone/token/provider.py
@@ -33,12 +33,13 @@ from keystone.i18n import _, _LE
from keystone.models import token_model
from keystone import notifications
from keystone.token import persistence
+from keystone.token import providers
from keystone.token import utils
CONF = cfg.CONF
LOG = log.getLogger(__name__)
-MEMOIZE = cache.get_memoization_decorator(section='token')
+MEMOIZE = cache.get_memoization_decorator(group='token')
# NOTE(morganfainberg): This is for compatibility in case someone was relying
# on the old location of the UnsupportedTokenVersionException for their code.
@@ -51,18 +52,37 @@ VERSIONS = token_model.VERSIONS
def base64_encode(s):
- """Encode a URL-safe string."""
- return base64.urlsafe_b64encode(s).rstrip('=')
+ """Encode a URL-safe string.
+
+ :type s: six.text_type
+ :rtype: six.text_type
+
+ """
+ # urlsafe_b64encode() returns six.binary_type so need to convert to
+ # six.text_type, might as well do it before stripping.
+ return base64.urlsafe_b64encode(s).decode('utf-8').rstrip('=')
def random_urlsafe_str():
- """Generate a random URL-safe string."""
+ """Generate a random URL-safe string.
+
+ :rtype: six.text_type
+
+ """
# chop the padding (==) off the end of the encoding to save space
- return base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
+ return base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2].decode('utf-8')
def random_urlsafe_str_to_bytes(s):
- """Convert a string generated by ``random_urlsafe_str()`` to bytes."""
+ """Convert a string from :func:`random_urlsafe_str()` to six.binary_type.
+
+ :type s: six.text_type
+ :rtype: six.binary_type
+
+ """
+ # urlsafe_b64decode() requires str, unicode isn't accepted.
+ s = str(s)
+
# restore the padding (==) at the end of the string
return base64.urlsafe_b64decode(s + '==')
@@ -201,14 +221,29 @@ class Manager(manager.Manager):
self.revoke_api.check_token(token_values)
def validate_v2_token(self, token_id, belongs_to=None):
- unique_id = utils.generate_unique_id(token_id)
+ # NOTE(lbragstad): Only go to the persistence backend if the token
+ # provider requires it.
if self._needs_persistence:
# NOTE(morganfainberg): Ensure we never use the long-form token_id
# (PKI) as part of the cache_key.
+ unique_id = utils.generate_unique_id(token_id)
token_ref = self._persistence.get_token(unique_id)
+ token = self._validate_v2_token(token_ref)
else:
- token_ref = token_id
- token = self._validate_v2_token(token_ref)
+ # NOTE(lbragstad): If the token doesn't require persistence, then
+ # it is a fernet token. The fernet token provider doesn't care if
+ # it's creating version 2.0 tokens or v3 tokens, so we use the same
+ # validate_non_persistent_token() method to validate both. Then we
+ # can leverage a separate method to make version 3 token data look
+ # like version 2.0 token data. The pattern we want to move towards
+ # is one where the token providers just handle data and the
+ # controller layers handle interpreting the token data in a format
+ # that makes sense for the request.
+ v3_token_ref = self.validate_non_persistent_token(token_id)
+ v2_token_data_helper = providers.common.V2TokenDataHelper()
+ token = v2_token_data_helper.v3_to_v2_token(v3_token_ref)
+
+ # these are common things that happen regardless of token provider
token['access']['token']['id'] = token_id
self._token_belongs_to(token, belongs_to)
self._is_valid_token(token)
@@ -223,37 +258,52 @@ class Manager(manager.Manager):
self.revoke_api.check_token(token_values)
def check_revocation(self, token):
- version = self.driver.get_token_version(token)
+ version = self.get_token_version(token)
if version == V2:
return self.check_revocation_v2(token)
else:
return self.check_revocation_v3(token)
def validate_v3_token(self, token_id):
- unique_id = utils.generate_unique_id(token_id)
- # NOTE(lbragstad): Only go to persistent storage if we have a token to
- # fetch from the backend. If the Fernet token provider is being used
- # this step isn't necessary. The Fernet token reference is persisted in
- # the token_id, so in this case set the token_ref as the identifier of
- # the token.
- if not self._needs_persistence:
- token_ref = token_id
- else:
- # NOTE(morganfainberg): Ensure we never use the long-form token_id
- # (PKI) as part of the cache_key.
- token_ref = self._persistence.get_token(unique_id)
- token = self._validate_v3_token(token_ref)
- self._is_valid_token(token)
- return token
+ if not token_id:
+ raise exception.TokenNotFound(_('No token in the request'))
+
+ try:
+ # NOTE(lbragstad): Only go to persistent storage if we have a token
+ # to fetch from the backend (the driver persists the token).
+ # Otherwise the information about the token must be in the token
+ # id.
+ if not self._needs_persistence:
+ token_ref = self.validate_non_persistent_token(token_id)
+ else:
+ unique_id = utils.generate_unique_id(token_id)
+ # NOTE(morganfainberg): Ensure we never use the long-form
+ # token_id (PKI) as part of the cache_key.
+ token_ref = self._persistence.get_token(unique_id)
+ token_ref = self._validate_v3_token(token_ref)
+ self._is_valid_token(token_ref)
+ return token_ref
+ except exception.Unauthorized as e:
+ LOG.debug('Unable to validate token: %s', e)
+ raise exception.TokenNotFound(token_id=token_id)
@MEMOIZE
def _validate_token(self, token_id):
+ if not token_id:
+ raise exception.TokenNotFound(_('No token in the request'))
+
if not self._needs_persistence:
- return self.driver.validate_v3_token(token_id)
+ # NOTE(lbragstad): This will validate v2 and v3 non-persistent
+ # tokens.
+ return self.driver.validate_non_persistent_token(token_id)
token_ref = self._persistence.get_token(token_id)
- version = self.driver.get_token_version(token_ref)
+ version = self.get_token_version(token_ref)
if version == self.V3:
- return self.driver.validate_v3_token(token_ref)
+ try:
+ return self.driver.validate_v3_token(token_ref)
+ except exception.Unauthorized as e:
+ LOG.debug('Unable to validate token: %s', e)
+ raise exception.TokenNotFound(token_id=token_id)
elif version == self.V2:
return self.driver.validate_v2_token(token_ref)
raise exception.UnsupportedTokenVersionException()
@@ -268,7 +318,6 @@ class Manager(manager.Manager):
def _is_valid_token(self, token):
"""Verify the token is valid format and has not expired."""
-
current_time = timeutils.normalize_time(timeutils.utcnow())
try:
@@ -490,7 +539,8 @@ class Provider(object):
:param token_data: token_data
:type token_data: dict
:returns: token version string
- :raises: keystone.token.provider.UnsupportedTokenVersionException
+ :raises keystone.exception.UnsupportedTokenVersionException:
+ If the token version is not expected.
"""
raise exception.NotImplemented() # pragma: no cover
@@ -548,8 +598,19 @@ class Provider(object):
:param token_ref: the token reference
:type token_ref: dict
:returns: token data
- :raises: keystone.exception.TokenNotFound
+ :raises keystone.exception.TokenNotFound: If the token doesn't exist.
+
+ """
+ raise exception.NotImplemented() # pragma: no cover
+ @abc.abstractmethod
+ def validate_non_persistent_token(self, token_id):
+ """Validate a given non-persistent token id and return the token_data.
+
+ :param token_id: the token id
+ :type token_id: string
+ :returns: token data
+ :raises keystone.exception.TokenNotFound: When the token is invalid
"""
raise exception.NotImplemented() # pragma: no cover
@@ -560,7 +621,7 @@ class Provider(object):
:param token_ref: the token reference
:type token_ref: dict
:returns: token data
- :raises: keystone.exception.TokenNotFound
+ :raises keystone.exception.TokenNotFound: If the token doesn't exist.
"""
raise exception.NotImplemented() # pragma: no cover
@@ -570,6 +631,7 @@ class Provider(object):
:param token_data: token information
:type token_data: dict
- returns: token identifier
+ :returns: token identifier
+ :rtype: six.text_type
"""
raise exception.NotImplemented() # pragma: no cover
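The reworked helpers at the top of provider.py return six.text_type and strip the trailing '=' base64 padding from generated strings, restoring it before decoding. The scheme round-trips with nothing but the standard library:

    import base64
    import uuid

    raw = uuid.uuid4().bytes                                    # 16 random bytes
    token = base64.urlsafe_b64encode(raw)[:-2].decode('utf-8')  # drop the '==' padding
    assert '=' not in token

    # random_urlsafe_str_to_bytes() restores the padding before decoding.
    assert base64.urlsafe_b64decode(token + '==') == raw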
diff --git a/keystone-moon/keystone/token/providers/common.py b/keystone-moon/keystone/token/providers/common.py
index b71458cd..94729178 100644
--- a/keystone-moon/keystone/token/providers/common.py
+++ b/keystone-moon/keystone/token/providers/common.py
@@ -14,7 +14,6 @@
from oslo_config import cfg
from oslo_log import log
-from oslo_log import versionutils
from oslo_serialization import jsonutils
import six
from six.moves.urllib import parse
@@ -22,8 +21,8 @@ from six.moves.urllib import parse
from keystone.common import controller as common_controller
from keystone.common import dependency
from keystone.common import utils
-from keystone.contrib.federation import constants as federation_constants
from keystone import exception
+from keystone.federation import constants as federation_constants
from keystone.i18n import _, _LE
from keystone import token
from keystone.token import provider
@@ -33,72 +32,69 @@ LOG = log.getLogger(__name__)
CONF = cfg.CONF
-@dependency.requires('catalog_api', 'resource_api')
+@dependency.requires('catalog_api', 'resource_api', 'assignment_api')
class V2TokenDataHelper(object):
"""Creates V2 token data."""
def v3_to_v2_token(self, v3_token_data):
+ """Convert v3 token data into v2.0 token data.
+
+ This method expects a dictionary generated from
+ V3TokenDataHelper.get_token_data() and converts it to look like a v2.0
+ token dictionary.
+
+ :param v3_token_data: dictionary formatted for v3 tokens
+ :returns: dictionary formatted for v2 tokens
+ :raises keystone.exception.Unauthorized: If a specific token type is
+ not supported in v2.
+
+ """
token_data = {}
# Build v2 token
v3_token = v3_token_data['token']
+ # NOTE(lbragstad): Version 2.0 tokens don't know about any domain other
+ # than the default domain specified in the configuration.
+ domain_id = v3_token.get('domain', {}).get('id')
+ if domain_id and CONF.identity.default_domain_id != domain_id:
+ msg = ('Unable to validate domain-scoped tokens outside of the '
+ 'default domain')
+ raise exception.Unauthorized(msg)
+
token = {}
token['expires'] = v3_token.get('expires_at')
token['issued_at'] = v3_token.get('issued_at')
token['audit_ids'] = v3_token.get('audit_ids')
- # Bail immediately if this is a domain-scoped token, which is not
- # supported by the v2 API at all.
- if 'domain' in v3_token:
- raise exception.Unauthorized(_(
- 'Domains are not supported by the v2 API. Please use the v3 '
- 'API instead.'))
-
- # Bail if this is a project-scoped token outside the default domain,
- # which may result in a namespace collision with a project inside the
- # default domain.
if 'project' in v3_token:
- if (v3_token['project']['domain']['id'] !=
- CONF.identity.default_domain_id):
- raise exception.Unauthorized(_(
- 'Project not found in the default domain (please use the '
- 'v3 API instead): %s') % v3_token['project']['id'])
-
# v3 token_data does not contain all tenant attributes
tenant = self.resource_api.get_project(
v3_token['project']['id'])
- token['tenant'] = common_controller.V2Controller.filter_domain_id(
+ # Drop domain specific fields since v2 calls are not domain-aware.
+ token['tenant'] = common_controller.V2Controller.v3_to_v2_project(
tenant)
token_data['token'] = token
# Build v2 user
v3_user = v3_token['user']
- # Bail if this is a token outside the default domain,
- # which may result in a namespace collision with a project inside the
- # default domain.
- if ('domain' in v3_user and v3_user['domain']['id'] !=
- CONF.identity.default_domain_id):
- raise exception.Unauthorized(_(
- 'User not found in the default domain (please use the v3 API '
- 'instead): %s') % v3_user['id'])
-
user = common_controller.V2Controller.v3_to_v2_user(v3_user)
- # Maintain Trust Data
if 'OS-TRUST:trust' in v3_token:
- v3_trust_data = v3_token['OS-TRUST:trust']
- token_data['trust'] = {
- 'trustee_user_id': v3_trust_data['trustee_user']['id'],
- 'id': v3_trust_data['id'],
- 'trustor_user_id': v3_trust_data['trustor_user']['id'],
- 'impersonation': v3_trust_data['impersonation']
- }
+ msg = ('Unable to validate trust-scoped tokens using version v2.0 '
+ 'API.')
+ raise exception.Unauthorized(msg)
+
+ if 'OS-OAUTH1' in v3_token:
+ msg = ('Unable to validate Oauth tokens using the version v2.0 '
+ 'API.')
+ raise exception.Unauthorized(msg)
# Set user roles
user['roles'] = []
role_ids = []
for role in v3_token.get('roles', []):
+ role_ids.append(role.pop('id'))
user['roles'].append(role)
user['roles_links'] = []
@@ -145,7 +141,7 @@ class V2TokenDataHelper(object):
o = {'access': {'token': {'id': token_ref['id'],
'expires': expires,
- 'issued_at': utils.strtime(),
+ 'issued_at': utils.isotime(subsecond=True),
'audit_ids': audit_info
},
'user': {'id': user_ref['id'],
@@ -186,7 +182,8 @@ class V2TokenDataHelper(object):
@classmethod
def format_catalog(cls, catalog_ref):
- """Munge catalogs from internal to output format
+ """Munge catalogs from internal to output format.
+
Internal catalogs look like::
{$REGION: {
@@ -235,6 +232,7 @@ class V2TokenDataHelper(object):
'identity_api', 'resource_api', 'role_api', 'trust_api')
class V3TokenDataHelper(object):
"""Token data helper."""
+
def __init__(self):
# Keep __init__ around to ensure dependency injection works.
super(V3TokenDataHelper, self).__init__()
@@ -248,8 +246,12 @@ class V3TokenDataHelper(object):
filtered_project = {
'id': project_ref['id'],
'name': project_ref['name']}
- filtered_project['domain'] = self._get_filtered_domain(
- project_ref['domain_id'])
+ if project_ref['domain_id'] is not None:
+ filtered_project['domain'] = (
+ self._get_filtered_domain(project_ref['domain_id']))
+ else:
+ # Projects acting as a domain do not have a domain_id attribute
+ filtered_project['domain'] = None
return filtered_project
def _populate_scope(self, token_data, domain_id, project_id):
@@ -262,6 +264,18 @@ class V3TokenDataHelper(object):
if project_id:
token_data['project'] = self._get_filtered_project(project_id)
+ def _populate_is_admin_project(self, token_data):
+ # TODO(ayoung): Support the ability for a project acting as a domain
+ # to be the admin project once the rest of the code for projects
+ # acting as domains is merged. Code will likely be:
+ # (r.admin_project_name == None and project['is_domain'] == True
+ # and project['name'] == r.admin_project_domain_name)
+ project = token_data['project']
+ r = CONF.resource
+ if (project['name'] == r.admin_project_name and
+ project['domain']['name'] == r.admin_project_domain_name):
+ token_data['is_admin_project'] = True
+
def _get_roles_for_user(self, user_id, domain_id, project_id):
roles = []
if domain_id:
@@ -282,12 +296,12 @@ class V3TokenDataHelper(object):
place.
:param token_data: a dictionary used for building token response
- :group_ids: list of group IDs a user is a member of
- :project_id: project ID to scope to
- :domain_id: domain ID to scope to
- :user_id: user ID
+ :param group_ids: list of group IDs a user is a member of
+ :param project_id: project ID to scope to
+ :param domain_id: domain ID to scope to
+ :param user_id: user ID
- :raises: exception.Unauthorized - when no roles were found for a
+ :raises keystone.exception.Unauthorized: when no roles were found for a
(group_ids, project_id) or (group_ids, domain_id) pairs.
"""
@@ -370,7 +384,16 @@ class V3TokenDataHelper(object):
return
if CONF.trust.enabled and trust:
- token_user_id = trust['trustor_user_id']
+ # If redelegated_trust_id is set, then we must traverse the
+ # trust_chain in order to determine who the original trustor is. We
+ # need to do this because the user ID of the original trustor helps
+ # us determine scope in the redelegated context.
+ if trust.get('redelegated_trust_id'):
+ trust_chain = self.trust_api.get_trust_pedigree(trust['id'])
+ token_user_id = trust_chain[-1]['trustor_user_id']
+ else:
+ token_user_id = trust['trustor_user_id']
+
token_project_id = trust['project_id']
# trusts do not support domains yet
token_domain_id = None
@@ -380,21 +403,39 @@ class V3TokenDataHelper(object):
token_domain_id = domain_id
if token_domain_id or token_project_id:
- roles = self._get_roles_for_user(token_user_id,
- token_domain_id,
- token_project_id)
filtered_roles = []
if CONF.trust.enabled and trust:
- for trust_role in trust['roles']:
- match_roles = [x for x in roles
- if x['id'] == trust_role['id']]
+ # First expand out any roles that were in the trust to include
+ # any implied roles, whether global or domain specific
+ refs = [{'role_id': role['id']} for role in trust['roles']]
+ effective_trust_roles = (
+ self.assignment_api.add_implied_roles(refs))
+ # Now get the current role assignments for the trustor,
+ # including any domain specific roles.
+ assignment_list = self.assignment_api.list_role_assignments(
+ user_id=token_user_id,
+ project_id=token_project_id,
+ effective=True, strip_domain_roles=False)
+ current_effective_trustor_roles = (
+ list(set([x['role_id'] for x in assignment_list])))
+ # Go through each of the effective trust roles, making sure the
+ # trustor still has them, if any have been removed, then we
+ # will treat the trust as invalid
+ for trust_role in effective_trust_roles:
+
+ match_roles = [x for x in current_effective_trustor_roles
+ if x == trust_role['role_id']]
if match_roles:
- filtered_roles.append(match_roles[0])
+ role = self.role_api.get_role(match_roles[0])
+ if role['domain_id'] is None:
+ filtered_roles.append(role)
else:
raise exception.Forbidden(
_('Trustee has no delegated roles.'))
else:
- for role in roles:
+ for role in self._get_roles_for_user(token_user_id,
+ token_domain_id,
+ token_project_id):
filtered_roles.append({'id': role['id'],
'name': role['name']})
@@ -426,7 +467,6 @@ class V3TokenDataHelper(object):
if project_id or domain_id:
service_catalog = self.catalog_api.get_v3_catalog(
user_id, project_id)
- # TODO(ayoung): Enforce Endpoints for trust
token_data['catalog'] = service_catalog
def _populate_service_providers(self, token_data):
@@ -458,20 +498,11 @@ class V3TokenDataHelper(object):
LOG.error(msg)
raise exception.UnexpectedError(msg)
- def get_token_data(self, user_id, method_names, extras=None,
- domain_id=None, project_id=None, expires=None,
- trust=None, token=None, include_catalog=True,
- bind=None, access_token=None, issued_at=None,
- audit_info=None):
- if extras is None:
- extras = {}
- if extras:
- versionutils.deprecated(
- what='passing token data with "extras"',
- as_of=versionutils.deprecated.KILO,
- in_favor_of='well-defined APIs')(lambda: None)()
- token_data = {'methods': method_names,
- 'extras': extras}
+ def get_token_data(self, user_id, method_names, domain_id=None,
+ project_id=None, expires=None, trust=None, token=None,
+ include_catalog=True, bind=None, access_token=None,
+ issued_at=None, audit_info=None):
+ token_data = {'methods': method_names}
# We've probably already written these to the token
if token:
@@ -479,14 +510,12 @@ class V3TokenDataHelper(object):
if x in token:
token_data[x] = token[x]
- if CONF.trust.enabled and trust:
- if user_id != trust['trustee_user_id']:
- raise exception.Forbidden(_('User is not a trustee.'))
-
if bind:
token_data['bind'] = bind
self._populate_scope(token_data, domain_id, project_id)
+ if token_data.get('project'):
+ self._populate_is_admin_project(token_data)
self._populate_user(token_data, user_id, trust)
self._populate_roles(token_data, user_id, domain_id, project_id, trust,
access_token)
@@ -527,6 +556,11 @@ class BaseProvider(provider.Provider):
def issue_v2_token(self, token_ref, roles_ref=None,
catalog_ref=None):
+ if token_ref.get('bind') and not self._supports_bind_authentication:
+ msg = _('The configured token provider does not support bind '
+ 'authentication.')
+ raise exception.NotImplemented(message=msg)
+
metadata_ref = token_ref['metadata']
trust_ref = None
if CONF.trust.enabled and metadata_ref and 'trust_id' in metadata_ref:
@@ -559,6 +593,10 @@ class BaseProvider(provider.Provider):
'trust_id' in metadata_ref):
trust = self.trust_api.get_trust(metadata_ref['trust_id'])
+ if CONF.trust.enabled and trust:
+ if user_id != trust['trustee_user_id']:
+ raise exception.Forbidden(_('User is not a trustee.'))
+
token_ref = None
if auth_context and self._is_mapped_token(auth_context):
token_ref = self._handle_mapped_tokens(
@@ -572,7 +610,6 @@ class BaseProvider(provider.Provider):
token_data = self.v3_token_data_helper.get_token_data(
user_id,
method_names,
- auth_context.get('extras') if auth_context else None,
domain_id=domain_id,
project_id=project_id,
expires=expires_at,
@@ -636,21 +673,10 @@ class BaseProvider(provider.Provider):
token.provider.V3):
# this is a V3 token
msg = _('Non-default domain is not supported')
- # user in a non-default is prohibited
- if (token_ref['token_data']['token']['user']['domain']['id'] !=
- CONF.identity.default_domain_id):
- raise exception.Unauthorized(msg)
# domain scoping is prohibited
if token_ref['token_data']['token'].get('domain'):
raise exception.Unauthorized(
_('Domain scoped token is not supported'))
- # project in non-default domain is prohibited
- if token_ref['token_data']['token'].get('project'):
- project = token_ref['token_data']['token']['project']
- project_domain_id = project['domain']['id']
- # scoped to project in non-default domain is prohibited
- if project_domain_id != CONF.identity.default_domain_id:
- raise exception.Unauthorized(msg)
# if token is scoped to trust, both trustor and trustee must
# be in the default domain. Furthermore, the delegated project
# must also be in the default domain
@@ -693,14 +719,58 @@ class BaseProvider(provider.Provider):
trust_id = token_data['access'].get('trust', {}).get('id')
if trust_id:
- # token trust validation
- self.trust_api.get_trust(trust_id)
+ msg = ('Unable to validate trust-scoped tokens using version '
+ 'v2.0 API.')
+ raise exception.Unauthorized(msg)
return token_data
- except exception.ValidationError as e:
+ except exception.ValidationError:
LOG.exception(_LE('Failed to validate token'))
+ token_id = token_ref['token_data']['access']['token']['id']
+ raise exception.TokenNotFound(token_id=token_id)
+
+ def validate_non_persistent_token(self, token_id):
+ try:
+ (user_id, methods, audit_ids, domain_id, project_id, trust_id,
+ federated_info, access_token_id, created_at, expires_at) = (
+ self.token_formatter.validate_token(token_id))
+ except exception.ValidationError as e:
raise exception.TokenNotFound(e)
+ token_dict = None
+ trust_ref = None
+ if federated_info:
+ # NOTE(lbragstad): We need to rebuild information about the
+ # federated token as well as the federated token roles. This is
+ # because when we validate a non-persistent token, we don't have a
+ # token reference to pull the federated token information out of.
+ # As a result, we have to extract it from the token itself and
+ # rebuild the federated context. These private methods currently
+ # live in the keystone.token.providers.fernet.Provider() class.
+ token_dict = self._rebuild_federated_info(federated_info, user_id)
+ if project_id or domain_id:
+ self._rebuild_federated_token_roles(token_dict, federated_info,
+ user_id, project_id,
+ domain_id)
+ if trust_id:
+ trust_ref = self.trust_api.get_trust(trust_id)
+
+ access_token = None
+ if access_token_id:
+ access_token = self.oauth_api.get_access_token(access_token_id)
+
+ return self.v3_token_data_helper.get_token_data(
+ user_id,
+ method_names=methods,
+ domain_id=domain_id,
+ project_id=project_id,
+ issued_at=created_at,
+ expires=expires_at,
+ trust=trust_ref,
+ token=token_dict,
+ access_token=access_token,
+ audit_info=audit_ids)
+
def validate_v3_token(self, token_ref):
# FIXME(gyee): performance or correctness? Should we return the
# cached token or reconstruct it? Obviously if we are going with
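The rewritten V2TokenDataHelper.v3_to_v2_token() above narrows what the v2.0 API will convert: domain-scoped tokens outside the default domain, trust-scoped tokens, and OAuth tokens are now rejected with 401 rather than partially translated. The guards, condensed into a standalone sketch (a plain ValueError stands in for keystone.exception.Unauthorized, and the default domain ID is a placeholder for CONF.identity.default_domain_id):

    DEFAULT_DOMAIN_ID = 'default'

    def check_v2_convertible(v3_token):
        # Reject v3 token data that has no v2.0 representation.
        domain_id = v3_token.get('domain', {}).get('id')
        if domain_id and domain_id != DEFAULT_DOMAIN_ID:
            raise ValueError('Unable to validate domain-scoped tokens outside '
                             'of the default domain')
        if 'OS-TRUST:trust' in v3_token:
            raise ValueError('Unable to validate trust-scoped tokens using '
                             'version v2.0 API.')
        if 'OS-OAUTH1' in v3_token:
            raise ValueError('Unable to validate Oauth tokens using the '
                             'version v2.0 API.')

    check_v2_convertible({'user': {'id': 'u1'}, 'audit_ids': ['abc']})  # passes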
diff --git a/keystone-moon/keystone/token/providers/fernet/core.py b/keystone-moon/keystone/token/providers/fernet/core.py
index a71c375b..ff6fe9cc 100644
--- a/keystone-moon/keystone/token/providers/fernet/core.py
+++ b/keystone-moon/keystone/token/providers/fernet/core.py
@@ -11,23 +11,18 @@
# under the License.
from oslo_config import cfg
-from oslo_log import log
from keystone.common import dependency
from keystone.common import utils as ks_utils
-from keystone.contrib.federation import constants as federation_constants
-from keystone import exception
-from keystone.i18n import _
-from keystone.token import provider
+from keystone.federation import constants as federation_constants
from keystone.token.providers import common
from keystone.token.providers.fernet import token_formatters as tf
CONF = cfg.CONF
-LOG = log.getLogger(__name__)
-@dependency.requires('trust_api')
+@dependency.requires('trust_api', 'oauth_api')
class Provider(common.BaseProvider):
def __init__(self, *args, **kwargs):
super(Provider, self).__init__(*args, **kwargs)
@@ -38,65 +33,10 @@ class Provider(common.BaseProvider):
"""Should the token be written to a backend."""
return False
- def issue_v2_token(self, token_ref, roles_ref=None, catalog_ref=None):
- """Issue a V2 formatted token.
-
- :param token_ref: reference describing the token
- :param roles_ref: reference describing the roles for the token
- :param catalog_ref: reference describing the token's catalog
- :returns: tuple containing the ID of the token and the token data
-
- """
- # TODO(lbragstad): Currently, Fernet tokens don't support bind in the
- # token format. Raise a 501 if we're dealing with bind.
- if token_ref.get('bind'):
- raise exception.NotImplemented()
-
- user_id = token_ref['user']['id']
- # Default to password since methods not provided by token_ref
- method_names = ['password']
- project_id = None
- # Verify that tenant is not None in token_ref
- if token_ref.get('tenant'):
- project_id = token_ref['tenant']['id']
-
- # maintain expiration time across rescopes
- expires = token_ref.get('expires')
-
- parent_audit_id = token_ref.get('parent_audit_id')
- # If parent_audit_id is defined then a token authentication was made
- if parent_audit_id:
- method_names.append('token')
-
- audit_ids = provider.audit_info(parent_audit_id)
-
- # Get v3 token data and exclude building v3 specific catalog. This is
- # due to the fact that the V2TokenDataHelper.format_token() method
- # doesn't build any of the token_reference from other Keystone APIs.
- # Instead, it builds it from what is persisted in the token reference.
- # Here we are going to leverage the V3TokenDataHelper.get_token_data()
- # method written for V3 because it goes through and populates the token
- # reference dynamically. Once we have a V3 token reference, we can
- # attempt to convert it to a V2 token response.
- v3_token_data = self.v3_token_data_helper.get_token_data(
- user_id,
- method_names,
- project_id=project_id,
- token=token_ref,
- include_catalog=False,
- audit_info=audit_ids,
- expires=expires)
-
- expires_at = v3_token_data['token']['expires_at']
- token_id = self.token_formatter.create_token(user_id, expires_at,
- audit_ids,
- methods=method_names,
- project_id=project_id)
- self._build_issued_at_info(token_id, v3_token_data)
- # Convert v3 to v2 token data and build v2 catalog
- token_data = self.v2_token_data_helper.v3_to_v2_token(v3_token_data)
- token_data['access']['token']['id'] = token_id
-
+ def issue_v2_token(self, *args, **kwargs):
+ token_id, token_data = super(Provider, self).issue_v2_token(
+ *args, **kwargs)
+ self._build_issued_at_info(token_id, token_data)
return token_id, token_data
def issue_v3_token(self, *args, **kwargs):
@@ -117,8 +57,12 @@ class Provider(common.BaseProvider):
# that we have to rely on when we validate the token.
fernet_creation_datetime_obj = self.token_formatter.creation_time(
token_id)
- token_data['token']['issued_at'] = ks_utils.isotime(
- at=fernet_creation_datetime_obj, subsecond=True)
+ if token_data.get('access'):
+ token_data['access']['token']['issued_at'] = ks_utils.isotime(
+ at=fernet_creation_datetime_obj, subsecond=True)
+ else:
+ token_data['token']['issued_at'] = ks_utils.isotime(
+ at=fernet_creation_datetime_obj, subsecond=True)
def _build_federated_info(self, token_data):
"""Extract everything needed for federated tokens.
@@ -127,18 +71,18 @@ class Provider(common.BaseProvider):
the values and build federated Fernet tokens.
"""
- idp_id = token_data['token'].get('user', {}).get(
- federation_constants.FEDERATION, {}).get(
- 'identity_provider', {}).get('id')
- protocol_id = token_data['token'].get('user', {}).get(
- federation_constants.FEDERATION, {}).get('protocol', {}).get('id')
- # If we don't have an identity provider ID and a protocol ID, it's safe
- # to assume we aren't dealing with a federated token.
- if not (idp_id and protocol_id):
- return None
-
- group_ids = token_data['token'].get('user', {}).get(
- federation_constants.FEDERATION, {}).get('groups')
+ token_data = token_data['token']
+ try:
+ user = token_data['user']
+ federation = user[federation_constants.FEDERATION]
+ idp_id = federation['identity_provider']['id']
+ protocol_id = federation['protocol']['id']
+ except KeyError:
+ # The token data doesn't have federated info, so we aren't dealing
+ # with a federated token and no federated info to build.
+ return
+
+ group_ids = federation.get('groups')
return {'group_ids': group_ids,
'idp_id': idp_id,
@@ -195,96 +139,66 @@ class Provider(common.BaseProvider):
self.v3_token_data_helper.populate_roles_for_groups(
token_dict, group_ids, project_id, domain_id, user_id)
- def validate_v2_token(self, token_ref):
- """Validate a V2 formatted token.
-
- :param token_ref: reference describing the token to validate
- :returns: the token data
- :raises keystone.exception.TokenNotFound: if token format is invalid
- :raises keystone.exception.Unauthorized: if v3 token is used
-
- """
- try:
- (user_id, methods,
- audit_ids, domain_id,
- project_id, trust_id,
- federated_info, created_at,
- expires_at) = self.token_formatter.validate_token(token_ref)
- except exception.ValidationError as e:
- raise exception.TokenNotFound(e)
-
- if trust_id or domain_id or federated_info:
- msg = _('This is not a v2.0 Fernet token. Use v3 for trust, '
- 'domain, or federated tokens.')
- raise exception.Unauthorized(msg)
-
- v3_token_data = self.v3_token_data_helper.get_token_data(
- user_id,
- methods,
- project_id=project_id,
- expires=expires_at,
- issued_at=created_at,
- token=token_ref,
- include_catalog=False,
- audit_info=audit_ids)
- token_data = self.v2_token_data_helper.v3_to_v2_token(v3_token_data)
- token_data['access']['token']['id'] = token_ref
- return token_data
-
- def validate_v3_token(self, token):
- """Validate a V3 formatted token.
-
- :param token: a string describing the token to validate
- :returns: the token data
- :raises keystone.exception.TokenNotFound: if token format version isn't
- supported
-
- """
- try:
- (user_id, methods, audit_ids, domain_id, project_id, trust_id,
- federated_info, created_at, expires_at) = (
- self.token_formatter.validate_token(token))
- except exception.ValidationError as e:
- raise exception.TokenNotFound(e)
-
- token_dict = None
- trust_ref = None
- if federated_info:
- token_dict = self._rebuild_federated_info(federated_info, user_id)
- if project_id or domain_id:
- self._rebuild_federated_token_roles(token_dict, federated_info,
- user_id, project_id,
- domain_id)
- if trust_id:
- trust_ref = self.trust_api.get_trust(trust_id)
-
- return self.v3_token_data_helper.get_token_data(
- user_id,
- method_names=methods,
- domain_id=domain_id,
- project_id=project_id,
- issued_at=created_at,
- expires=expires_at,
- trust=trust_ref,
- token=token_dict,
- audit_info=audit_ids)
+ def _extract_v2_token_data(self, token_data):
+ user_id = token_data['access']['user']['id']
+ expires_at = token_data['access']['token']['expires']
+ audit_ids = token_data['access']['token'].get('audit_ids')
+ methods = ['password']
+ if len(audit_ids) > 1:
+ methods.append('token')
+ project_id = token_data['access']['token'].get('tenant', {}).get('id')
+ domain_id = None
+ trust_id = None
+ access_token_id = None
+ federated_info = None
+ return (user_id, expires_at, audit_ids, methods, domain_id, project_id,
+ trust_id, access_token_id, federated_info)
+
+ def _extract_v3_token_data(self, token_data):
+ """Extract information from a v3 token reference."""
+ user_id = token_data['token']['user']['id']
+ expires_at = token_data['token']['expires_at']
+ audit_ids = token_data['token']['audit_ids']
+ methods = token_data['token'].get('methods')
+ domain_id = token_data['token'].get('domain', {}).get('id')
+ project_id = token_data['token'].get('project', {}).get('id')
+ trust_id = token_data['token'].get('OS-TRUST:trust', {}).get('id')
+ access_token_id = token_data['token'].get('OS-OAUTH1', {}).get(
+ 'access_token_id')
+ federated_info = self._build_federated_info(token_data)
+
+ return (user_id, expires_at, audit_ids, methods, domain_id, project_id,
+ trust_id, access_token_id, federated_info)
def _get_token_id(self, token_data):
"""Generate the token_id based upon the data in token_data.
:param token_data: token information
:type token_data: dict
- :raises keystone.exception.NotImplemented: when called
+ :rtype: six.text_type
+
"""
+ # NOTE(lbragstad): Only v2.0 token responses include an 'access'
+ # attribute.
+ if token_data.get('access'):
+ (user_id, expires_at, audit_ids, methods, domain_id, project_id,
+ trust_id, access_token_id, federated_info) = (
+ self._extract_v2_token_data(token_data))
+ else:
+ (user_id, expires_at, audit_ids, methods, domain_id, project_id,
+ trust_id, access_token_id, federated_info) = (
+ self._extract_v3_token_data(token_data))
+
return self.token_formatter.create_token(
- token_data['token']['user']['id'],
- token_data['token']['expires_at'],
- token_data['token']['audit_ids'],
- methods=token_data['token'].get('methods'),
- domain_id=token_data['token'].get('domain', {}).get('id'),
- project_id=token_data['token'].get('project', {}).get('id'),
- trust_id=token_data['token'].get('OS-TRUST:trust', {}).get('id'),
- federated_info=self._build_federated_info(token_data)
+ user_id,
+ expires_at,
+ audit_ids,
+ methods=methods,
+ domain_id=domain_id,
+ project_id=project_id,
+ trust_id=trust_id,
+ federated_info=federated_info,
+ access_token_id=access_token_id
)
@property
@@ -292,5 +206,6 @@ class Provider(common.BaseProvider):
"""Return if the token provider supports bind authentication methods.
:returns: False
+
"""
return False
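A minimal sketch of the v2.0-versus-v3 dispatch that _get_token_id() performs above: only v2.0 response bodies carry an 'access' key, so its presence selects the v2 extractor. The helper name _pick_extractor is hypothetical and only illustrates the check, not the provider's API.

def _pick_extractor(token_data):
    # v2.0 token responses are the only ones with an 'access' attribute;
    # everything else is treated as a v3 token reference.
    if token_data.get('access'):
        return 'v2.0'
    return 'v3'

assert _pick_extractor({'access': {'token': {}}}) == 'v2.0'
assert _pick_extractor({'token': {'user': {'id': 'abc'}}}) == 'v3'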
diff --git a/keystone-moon/keystone/token/providers/fernet/token_formatters.py b/keystone-moon/keystone/token/providers/fernet/token_formatters.py
index dbfee6dd..dfdd06e8 100644
--- a/keystone-moon/keystone/token/providers/fernet/token_formatters.py
+++ b/keystone-moon/keystone/token/providers/fernet/token_formatters.py
@@ -20,7 +20,6 @@ import msgpack
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
-import six
from six.moves import map
from six.moves import urllib
@@ -66,14 +65,22 @@ class TokenFormatter(object):
return fernet.MultiFernet(fernet_instances)
def pack(self, payload):
- """Pack a payload for transport as a token."""
+ """Pack a payload for transport as a token.
+
+ :type payload: six.binary_type
+ :rtype: six.text_type
+
+ """
# base64 padding (if any) is not URL-safe
- return self.crypto.encrypt(payload).rstrip('=')
+ return self.crypto.encrypt(payload).rstrip(b'=').decode('utf-8')
def unpack(self, token):
- """Unpack a token, and validate the payload."""
- token = six.binary_type(token)
+ """Unpack a token, and validate the payload.
+ :type token: six.text_type
+ :rtype: six.binary_type
+
+ """
# TODO(lbragstad): Restore padding on token before decoding it.
# Initially in Kilo, Fernet tokens were returned to the user with
# padding appended to the token. Later in Liberty this padding was
@@ -89,16 +96,17 @@ class TokenFormatter(object):
token = TokenFormatter.restore_padding(token)
try:
- return self.crypto.decrypt(token)
+ return self.crypto.decrypt(token.encode('utf-8'))
except fernet.InvalidToken:
raise exception.ValidationError(
- _('This is not a recognized Fernet token'))
+                _('This is not a recognized Fernet token: %s') % token)
@classmethod
def restore_padding(cls, token):
"""Restore padding based on token size.
:param token: token to restore padding on
+ :type token: six.text_type
:returns: token with correct padding
"""
@@ -106,21 +114,22 @@ class TokenFormatter(object):
mod_returned = len(token) % 4
if mod_returned:
missing_padding = 4 - mod_returned
- token += b'=' * missing_padding
+ token += '=' * missing_padding
return token
@classmethod
def creation_time(cls, fernet_token):
- """Returns the creation time of a valid Fernet token."""
- # tokens may be transmitted as Unicode, but they're just ASCII
- # (pypi/cryptography will refuse to operate on Unicode input)
- fernet_token = six.binary_type(fernet_token)
+        """Return the creation time of a valid Fernet token.
- # Restore padding on token before decoding it
+ :type fernet_token: six.text_type
+
+ """
fernet_token = TokenFormatter.restore_padding(fernet_token)
+ # fernet_token is six.text_type
- # fernet tokens are base64 encoded, so we need to unpack them first
- token_bytes = base64.urlsafe_b64decode(fernet_token)
+ # Fernet tokens are base64 encoded, so we need to unpack them first
+ # urlsafe_b64decode() requires six.binary_type
+ token_bytes = base64.urlsafe_b64decode(fernet_token.encode('utf-8'))
# slice into the byte array to get just the timestamp
timestamp_bytes = token_bytes[TIMESTAMP_START:TIMESTAMP_END]
@@ -136,66 +145,20 @@ class TokenFormatter(object):
def create_token(self, user_id, expires_at, audit_ids, methods=None,
domain_id=None, project_id=None, trust_id=None,
- federated_info=None):
+ federated_info=None, access_token_id=None):
"""Given a set of payload attributes, generate a Fernet token."""
- if trust_id:
- version = TrustScopedPayload.version
- payload = TrustScopedPayload.assemble(
- user_id,
- methods,
- project_id,
- expires_at,
- audit_ids,
- trust_id)
- elif project_id and federated_info:
- version = FederatedProjectScopedPayload.version
- payload = FederatedProjectScopedPayload.assemble(
- user_id,
- methods,
- project_id,
- expires_at,
- audit_ids,
- federated_info)
- elif domain_id and federated_info:
- version = FederatedDomainScopedPayload.version
- payload = FederatedDomainScopedPayload.assemble(
- user_id,
- methods,
- domain_id,
- expires_at,
- audit_ids,
- federated_info)
- elif federated_info:
- version = FederatedUnscopedPayload.version
- payload = FederatedUnscopedPayload.assemble(
- user_id,
- methods,
- expires_at,
- audit_ids,
- federated_info)
- elif project_id:
- version = ProjectScopedPayload.version
- payload = ProjectScopedPayload.assemble(
- user_id,
- methods,
- project_id,
- expires_at,
- audit_ids)
- elif domain_id:
- version = DomainScopedPayload.version
- payload = DomainScopedPayload.assemble(
- user_id,
- methods,
- domain_id,
- expires_at,
- audit_ids)
- else:
- version = UnscopedPayload.version
- payload = UnscopedPayload.assemble(
- user_id,
- methods,
- expires_at,
- audit_ids)
+ for payload_class in PAYLOAD_CLASSES:
+ if payload_class.create_arguments_apply(
+ project_id=project_id, domain_id=domain_id,
+ trust_id=trust_id, federated_info=federated_info,
+ access_token_id=access_token_id):
+ break
+
+ version = payload_class.version
+ payload = payload_class.assemble(
+ user_id, methods, project_id, domain_id, expires_at, audit_ids,
+ trust_id, federated_info, access_token_id
+ )
versioned_payload = (version,) + payload
serialized_payload = msgpack.packb(versioned_payload)
@@ -215,44 +178,21 @@ class TokenFormatter(object):
return token
def validate_token(self, token):
- """Validates a Fernet token and returns the payload attributes."""
- # Convert v2 unicode token to a string
- if not isinstance(token, six.binary_type):
- token = token.encode('ascii')
+        """Validate a Fernet token and return the payload attributes.
+ :type token: six.text_type
+
+ """
serialized_payload = self.unpack(token)
versioned_payload = msgpack.unpackb(serialized_payload)
version, payload = versioned_payload[0], versioned_payload[1:]
- # depending on the formatter, these may or may not be defined
- domain_id = None
- project_id = None
- trust_id = None
- federated_info = None
-
- if version == UnscopedPayload.version:
- (user_id, methods, expires_at, audit_ids) = (
- UnscopedPayload.disassemble(payload))
- elif version == DomainScopedPayload.version:
- (user_id, methods, domain_id, expires_at, audit_ids) = (
- DomainScopedPayload.disassemble(payload))
- elif version == ProjectScopedPayload.version:
- (user_id, methods, project_id, expires_at, audit_ids) = (
- ProjectScopedPayload.disassemble(payload))
- elif version == TrustScopedPayload.version:
- (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
- TrustScopedPayload.disassemble(payload))
- elif version == FederatedUnscopedPayload.version:
- (user_id, methods, expires_at, audit_ids, federated_info) = (
- FederatedUnscopedPayload.disassemble(payload))
- elif version == FederatedProjectScopedPayload.version:
- (user_id, methods, project_id, expires_at, audit_ids,
- federated_info) = FederatedProjectScopedPayload.disassemble(
- payload)
- elif version == FederatedDomainScopedPayload.version:
- (user_id, methods, domain_id, expires_at, audit_ids,
- federated_info) = FederatedDomainScopedPayload.disassemble(
- payload)
+ for payload_class in PAYLOAD_CLASSES:
+ if version == payload_class.version:
+ (user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id) = (
+ payload_class.disassemble(payload))
+ break
else:
# If the token_format is not recognized, raise ValidationError.
raise exception.ValidationError(_(
@@ -267,7 +207,7 @@ class TokenFormatter(object):
expires_at = ks_utils.isotime(at=expires_at, subsecond=True)
return (user_id, methods, audit_ids, domain_id, project_id, trust_id,
- federated_info, created_at, expires_at)
+ federated_info, access_token_id, created_at, expires_at)
class BasePayload(object):
@@ -275,10 +215,32 @@ class BasePayload(object):
version = None
@classmethod
- def assemble(cls, *args):
+ def create_arguments_apply(cls, **kwargs):
+ """Check the arguments to see if they apply to this payload variant.
+
+ :returns: True if the arguments indicate that this payload class is
+                  needed for the token, otherwise returns False.
+ :rtype: bool
+
+ """
+ raise NotImplementedError()
+
+ @classmethod
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
"""Assemble the payload of a token.
- :param args: whatever data should go into the payload
+ :param user_id: identifier of the user in the token request
+ :param methods: list of authentication methods used
+ :param project_id: ID of the project to scope to
+ :param domain_id: ID of the domain to scope to
+ :param expires_at: datetime of the token's expiration
+ :param audit_ids: list of the token's audit IDs
+ :param trust_id: ID of the trust in effect
+ :param federated_info: dictionary containing group IDs, the identity
+ provider ID, protocol ID, and federated domain
+ ID
+ :param access_token_id: ID of the secret in OAuth1 authentication
:returns: the payload of a token
"""
@@ -288,6 +250,17 @@ class BasePayload(object):
def disassemble(cls, payload):
"""Disassemble an unscoped payload into the component data.
+ The tuple consists of::
+
+ (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
+
+ * ``methods`` are the auth methods.
+        * ``federated_info`` is a dict that contains the group IDs, the
+          identity provider ID, the protocol ID, and the federated domain ID.
+
+ Fields will be set to None if they didn't apply to this payload type.
+
:param payload: this variant of payload
:returns: a tuple of the payloads component data
@@ -302,9 +275,6 @@ class BasePayload(object):
:returns: a byte representation of the uuid
"""
- # TODO(lbragstad): Wrap this in an exception. Not sure what the case
- # would be where we couldn't handle what we've been given but incase
- # the integrity of the token has been compromised.
uuid_obj = uuid.UUID(uuid_string)
return uuid_obj.bytes
@@ -316,18 +286,15 @@ class BasePayload(object):
:returns: uuid hex formatted string
"""
- # TODO(lbragstad): Wrap this in an exception. Not sure what the case
- # would be where we couldn't handle what we've been given but incase
- # the integrity of the token has been compromised.
uuid_obj = uuid.UUID(bytes=uuid_byte_string)
return uuid_obj.hex
@classmethod
- def _convert_time_string_to_int(cls, time_string):
- """Convert a time formatted string to a timestamp integer.
+ def _convert_time_string_to_float(cls, time_string):
+ """Convert a time formatted string to a float.
:param time_string: time formatted string
- :returns: an integer timestamp
+ :returns: a timestamp as a float
"""
time_object = timeutils.parse_isotime(time_string)
@@ -335,14 +302,14 @@ class BasePayload(object):
datetime.datetime.utcfromtimestamp(0)).total_seconds()
@classmethod
- def _convert_int_to_time_string(cls, time_int):
- """Convert a timestamp integer to a string.
+ def _convert_float_to_time_string(cls, time_float):
+ """Convert a floating point timestamp to a string.
- :param time_int: integer representing timestamp
+        :param time_float: float representing the timestamp
:returns: a time formatted string
"""
- time_object = datetime.datetime.utcfromtimestamp(time_int)
+ time_object = datetime.datetime.utcfromtimestamp(time_float)
return ks_utils.isotime(time_object, subsecond=True)
@classmethod
@@ -361,74 +328,51 @@ class BasePayload(object):
# federation)
return (False, value)
- @classmethod
- def attempt_convert_uuid_bytes_to_hex(cls, value):
- """Attempt to convert value to hex or return value.
-
- :param value: value to attempt to convert to hex
- :returns: uuid value in hex or value
-
- """
- try:
- return cls.convert_uuid_bytes_to_hex(value)
- except ValueError:
- return value
-
class UnscopedPayload(BasePayload):
version = 0
@classmethod
- def assemble(cls, user_id, methods, expires_at, audit_ids):
- """Assemble the payload of an unscoped token.
-
- :param user_id: identifier of the user in the token request
- :param methods: list of authentication methods used
- :param expires_at: datetime of the token's expiration
- :param audit_ids: list of the token's audit IDs
- :returns: the payload of an unscoped token
+ def create_arguments_apply(cls, **kwargs):
+ return True
- """
+ @classmethod
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
- expires_at_int = cls._convert_time_string_to_int(expires_at)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
return (b_user_id, methods, expires_at_int, b_audit_ids)
@classmethod
def disassemble(cls, payload):
- """Disassemble an unscoped payload into the component data.
-
- :param payload: the payload of an unscoped token
- :return: a tuple containing the user_id, auth methods, expires_at, and
- audit_ids
-
- """
(is_stored_as_bytes, user_id) = payload[0]
if is_stored_as_bytes:
- user_id = cls.attempt_convert_uuid_bytes_to_hex(user_id)
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
methods = auth_plugins.convert_integer_to_method_list(payload[1])
- expires_at_str = cls._convert_int_to_time_string(payload[2])
+ expires_at_str = cls._convert_float_to_time_string(payload[2])
audit_ids = list(map(provider.base64_encode, payload[3]))
- return (user_id, methods, expires_at_str, audit_ids)
+ project_id = None
+ domain_id = None
+ trust_id = None
+ federated_info = None
+ access_token_id = None
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
class DomainScopedPayload(BasePayload):
version = 1
@classmethod
- def assemble(cls, user_id, methods, domain_id, expires_at, audit_ids):
- """Assemble the payload of a domain-scoped token.
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['domain_id']
- :param user_id: ID of the user in the token request
- :param methods: list of authentication methods used
- :param domain_id: ID of the domain to scope to
- :param expires_at: datetime of the token's expiration
- :param audit_ids: list of the token's audit IDs
- :returns: the payload of a domain-scoped token
-
- """
+ @classmethod
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
try:
@@ -439,23 +383,16 @@ class DomainScopedPayload(BasePayload):
b_domain_id = domain_id
else:
raise
- expires_at_int = cls._convert_time_string_to_int(expires_at)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
return (b_user_id, methods, b_domain_id, expires_at_int, b_audit_ids)
@classmethod
def disassemble(cls, payload):
- """Disassemble a payload into the component data.
-
- :param payload: the payload of a token
- :return: a tuple containing the user_id, auth methods, domain_id,
- expires_at_str, and audit_ids
-
- """
(is_stored_as_bytes, user_id) = payload[0]
if is_stored_as_bytes:
- user_id = cls.attempt_convert_uuid_bytes_to_hex(user_id)
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
methods = auth_plugins.convert_integer_to_method_list(payload[1])
try:
domain_id = cls.convert_uuid_bytes_to_hex(payload[2])
@@ -465,79 +402,68 @@ class DomainScopedPayload(BasePayload):
domain_id = payload[2]
else:
raise
- expires_at_str = cls._convert_int_to_time_string(payload[3])
+ expires_at_str = cls._convert_float_to_time_string(payload[3])
audit_ids = list(map(provider.base64_encode, payload[4]))
-
- return (user_id, methods, domain_id, expires_at_str, audit_ids)
+ project_id = None
+ trust_id = None
+ federated_info = None
+ access_token_id = None
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
class ProjectScopedPayload(BasePayload):
version = 2
@classmethod
- def assemble(cls, user_id, methods, project_id, expires_at, audit_ids):
- """Assemble the payload of a project-scoped token.
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['project_id']
- :param user_id: ID of the user in the token request
- :param methods: list of authentication methods used
- :param project_id: ID of the project to scope to
- :param expires_at: datetime of the token's expiration
- :param audit_ids: list of the token's audit IDs
- :returns: the payload of a project-scoped token
-
- """
+ @classmethod
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
- expires_at_int = cls._convert_time_string_to_int(expires_at)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
return (b_user_id, methods, b_project_id, expires_at_int, b_audit_ids)
@classmethod
def disassemble(cls, payload):
- """Disassemble a payload into the component data.
-
- :param payload: the payload of a token
- :return: a tuple containing the user_id, auth methods, project_id,
- expires_at_str, and audit_ids
-
- """
(is_stored_as_bytes, user_id) = payload[0]
if is_stored_as_bytes:
- user_id = cls.attempt_convert_uuid_bytes_to_hex(user_id)
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
methods = auth_plugins.convert_integer_to_method_list(payload[1])
(is_stored_as_bytes, project_id) = payload[2]
if is_stored_as_bytes:
- project_id = cls.attempt_convert_uuid_bytes_to_hex(project_id)
- expires_at_str = cls._convert_int_to_time_string(payload[3])
+ project_id = cls.convert_uuid_bytes_to_hex(project_id)
+ expires_at_str = cls._convert_float_to_time_string(payload[3])
audit_ids = list(map(provider.base64_encode, payload[4]))
-
- return (user_id, methods, project_id, expires_at_str, audit_ids)
+ domain_id = None
+ trust_id = None
+ federated_info = None
+ access_token_id = None
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
class TrustScopedPayload(BasePayload):
version = 3
@classmethod
- def assemble(cls, user_id, methods, project_id, expires_at, audit_ids,
- trust_id):
- """Assemble the payload of a trust-scoped token.
-
- :param user_id: ID of the user in the token request
- :param methods: list of authentication methods used
- :param project_id: ID of the project to scope to
- :param expires_at: datetime of the token's expiration
- :param audit_ids: list of the token's audit IDs
- :param trust_id: ID of the trust in effect
- :returns: the payload of a trust-scoped token
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['trust_id']
- """
+ @classmethod
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
b_trust_id = cls.convert_uuid_hex_to_bytes(trust_id)
- expires_at_int = cls._convert_time_string_to_int(expires_at)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
@@ -546,32 +472,31 @@ class TrustScopedPayload(BasePayload):
@classmethod
def disassemble(cls, payload):
- """Validate a trust-based payload.
-
- :param token_string: a string representing the token
- :returns: a tuple containing the user_id, auth methods, project_id,
- expires_at_str, audit_ids, and trust_id
-
- """
(is_stored_as_bytes, user_id) = payload[0]
if is_stored_as_bytes:
- user_id = cls.attempt_convert_uuid_bytes_to_hex(user_id)
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
methods = auth_plugins.convert_integer_to_method_list(payload[1])
(is_stored_as_bytes, project_id) = payload[2]
if is_stored_as_bytes:
- project_id = cls.attempt_convert_uuid_bytes_to_hex(project_id)
- expires_at_str = cls._convert_int_to_time_string(payload[3])
+ project_id = cls.convert_uuid_bytes_to_hex(project_id)
+ expires_at_str = cls._convert_float_to_time_string(payload[3])
audit_ids = list(map(provider.base64_encode, payload[4]))
trust_id = cls.convert_uuid_bytes_to_hex(payload[5])
-
- return (user_id, methods, project_id, expires_at_str, audit_ids,
- trust_id)
+ domain_id = None
+ federated_info = None
+ access_token_id = None
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
class FederatedUnscopedPayload(BasePayload):
version = 4
@classmethod
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['federated_info']
+
+ @classmethod
def pack_group_id(cls, group_dict):
return cls.attempt_convert_uuid_hex_to_bytes(group_dict['id'])
@@ -579,24 +504,12 @@ class FederatedUnscopedPayload(BasePayload):
def unpack_group_id(cls, group_id_in_bytes):
(is_stored_as_bytes, group_id) = group_id_in_bytes
if is_stored_as_bytes:
- group_id = cls.attempt_convert_uuid_bytes_to_hex(group_id)
+ group_id = cls.convert_uuid_bytes_to_hex(group_id)
return {'id': group_id}
@classmethod
- def assemble(cls, user_id, methods, expires_at, audit_ids, federated_info):
- """Assemble the payload of a federated token.
-
- :param user_id: ID of the user in the token request
- :param methods: list of authentication methods used
- :param expires_at: datetime of the token's expiration
- :param audit_ids: list of the token's audit IDs
- :param federated_info: dictionary containing group IDs, the identity
- provider ID, protocol ID, and federated domain
- ID
- :returns: the payload of a federated token
-
- """
-
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
b_group_ids = list(map(cls.pack_group_id,
@@ -604,7 +517,7 @@ class FederatedUnscopedPayload(BasePayload):
b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
federated_info['idp_id'])
protocol_id = federated_info['protocol_id']
- expires_at_int = cls._convert_time_string_to_int(expires_at)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
@@ -613,59 +526,43 @@ class FederatedUnscopedPayload(BasePayload):
@classmethod
def disassemble(cls, payload):
- """Validate a federated payload.
-
- :param token_string: a string representing the token
- :return: a tuple containing the user_id, auth methods, audit_ids, and a
- dictionary containing federated information such as the group
- IDs, the identity provider ID, the protocol ID, and the
- federated domain ID
-
- """
-
(is_stored_as_bytes, user_id) = payload[0]
if is_stored_as_bytes:
- user_id = cls.attempt_convert_uuid_bytes_to_hex(user_id)
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
methods = auth_plugins.convert_integer_to_method_list(payload[1])
group_ids = list(map(cls.unpack_group_id, payload[2]))
(is_stored_as_bytes, idp_id) = payload[3]
if is_stored_as_bytes:
- idp_id = cls.attempt_convert_uuid_bytes_to_hex(idp_id)
+ idp_id = cls.convert_uuid_bytes_to_hex(idp_id)
protocol_id = payload[4]
- expires_at_str = cls._convert_int_to_time_string(payload[5])
+ expires_at_str = cls._convert_float_to_time_string(payload[5])
audit_ids = list(map(provider.base64_encode, payload[6]))
federated_info = dict(group_ids=group_ids, idp_id=idp_id,
protocol_id=protocol_id)
- return (user_id, methods, expires_at_str, audit_ids, federated_info)
+ project_id = None
+ domain_id = None
+ trust_id = None
+ access_token_id = None
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
class FederatedScopedPayload(FederatedUnscopedPayload):
version = None
@classmethod
- def assemble(cls, user_id, methods, scope_id, expires_at, audit_ids,
- federated_info):
- """Assemble the project-scoped payload of a federated token.
-
- :param user_id: ID of the user in the token request
- :param methods: list of authentication methods used
- :param scope_id: ID of the project or domain ID to scope to
- :param expires_at: datetime of the token's expiration
- :param audit_ids: list of the token's audit IDs
- :param federated_info: dictionary containing the identity provider ID,
- protocol ID, federated domain ID and group IDs
- :returns: the payload of a federated token
-
- """
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
methods = auth_plugins.convert_method_list_to_integer(methods)
- b_scope_id = cls.attempt_convert_uuid_hex_to_bytes(scope_id)
+ b_scope_id = cls.attempt_convert_uuid_hex_to_bytes(
+ project_id or domain_id)
b_group_ids = list(map(cls.pack_group_id,
federated_info['group_ids']))
b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
federated_info['idp_id'])
protocol_id = federated_info['protocol_id']
- expires_at_int = cls._convert_time_string_to_int(expires_at)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
audit_ids))
@@ -674,39 +571,107 @@ class FederatedScopedPayload(FederatedUnscopedPayload):
@classmethod
def disassemble(cls, payload):
- """Validate a project-scoped federated payload.
-
- :param token_string: a string representing the token
- :returns: a tuple containing the user_id, auth methods, scope_id,
- expiration time (as str), audit_ids, and a dictionary
- containing federated information such as the the identity
- provider ID, the protocol ID, the federated domain ID and
- group IDs
-
- """
(is_stored_as_bytes, user_id) = payload[0]
if is_stored_as_bytes:
- user_id = cls.attempt_convert_uuid_bytes_to_hex(user_id)
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
methods = auth_plugins.convert_integer_to_method_list(payload[1])
(is_stored_as_bytes, scope_id) = payload[2]
if is_stored_as_bytes:
- scope_id = cls.attempt_convert_uuid_bytes_to_hex(scope_id)
+ scope_id = cls.convert_uuid_bytes_to_hex(scope_id)
+ project_id = (
+ scope_id
+ if cls.version == FederatedProjectScopedPayload.version else None)
+ domain_id = (
+ scope_id
+ if cls.version == FederatedDomainScopedPayload.version else None)
group_ids = list(map(cls.unpack_group_id, payload[3]))
(is_stored_as_bytes, idp_id) = payload[4]
if is_stored_as_bytes:
- idp_id = cls.attempt_convert_uuid_bytes_to_hex(idp_id)
+ idp_id = cls.convert_uuid_bytes_to_hex(idp_id)
protocol_id = payload[5]
- expires_at_str = cls._convert_int_to_time_string(payload[6])
+ expires_at_str = cls._convert_float_to_time_string(payload[6])
audit_ids = list(map(provider.base64_encode, payload[7]))
federated_info = dict(idp_id=idp_id, protocol_id=protocol_id,
group_ids=group_ids)
- return (user_id, methods, scope_id, expires_at_str, audit_ids,
- federated_info)
+ trust_id = None
+ access_token_id = None
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
class FederatedProjectScopedPayload(FederatedScopedPayload):
version = 5
+ @classmethod
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['project_id'] and kwargs['federated_info']
+
class FederatedDomainScopedPayload(FederatedScopedPayload):
version = 6
+
+ @classmethod
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['domain_id'] and kwargs['federated_info']
+
+
+class OauthScopedPayload(BasePayload):
+ version = 7
+
+ @classmethod
+ def create_arguments_apply(cls, **kwargs):
+ return kwargs['access_token_id']
+
+ @classmethod
+ def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
+ audit_ids, trust_id, federated_info, access_token_id):
+ b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
+ methods = auth_plugins.convert_method_list_to_integer(methods)
+ b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
+ expires_at_int = cls._convert_time_string_to_float(expires_at)
+ b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
+ audit_ids))
+ b_access_token_id = cls.attempt_convert_uuid_hex_to_bytes(
+ access_token_id)
+ return (b_user_id, methods, b_project_id, b_access_token_id,
+ expires_at_int, b_audit_ids)
+
+ @classmethod
+ def disassemble(cls, payload):
+ (is_stored_as_bytes, user_id) = payload[0]
+ if is_stored_as_bytes:
+ user_id = cls.convert_uuid_bytes_to_hex(user_id)
+ methods = auth_plugins.convert_integer_to_method_list(payload[1])
+ (is_stored_as_bytes, project_id) = payload[2]
+ if is_stored_as_bytes:
+ project_id = cls.convert_uuid_bytes_to_hex(project_id)
+ (is_stored_as_bytes, access_token_id) = payload[3]
+ if is_stored_as_bytes:
+ access_token_id = cls.convert_uuid_bytes_to_hex(access_token_id)
+ expires_at_str = cls._convert_float_to_time_string(payload[4])
+ audit_ids = list(map(provider.base64_encode, payload[5]))
+ domain_id = None
+ trust_id = None
+ federated_info = None
+
+ return (user_id, methods, project_id, domain_id, expires_at_str,
+ audit_ids, trust_id, federated_info, access_token_id)
+
+
+# For now, the order of the classes in the following list is important. This
+# is because each class's create_arguments_apply check assumes that the
+# previous classes have already rejected the payload arguments. For example,
+# UnscopedPayload must be last since it's the catch-all after all the other
+# payloads have been checked.
+# TODO(blk-u): Clean up the create_arguments_apply methods so that they don't
+# depend on the previous classes; then these can be in any order.
+PAYLOAD_CLASSES = [
+ OauthScopedPayload,
+ TrustScopedPayload,
+ FederatedProjectScopedPayload,
+ FederatedDomainScopedPayload,
+ FederatedUnscopedPayload,
+ ProjectScopedPayload,
+ DomainScopedPayload,
+ UnscopedPayload,
+]
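A standalone sketch of the first-match selection that create_token() performs over PAYLOAD_CLASSES, illustrating why the ordering above matters; pick_payload_class is a hypothetical helper, not part of this module.

def pick_payload_class(project_id=None, domain_id=None, trust_id=None,
                       federated_info=None, access_token_id=None):
    # The first class whose create_arguments_apply() returns a truthy value
    # wins, so the catch-all UnscopedPayload must remain last in the list.
    for payload_class in PAYLOAD_CLASSES:
        if payload_class.create_arguments_apply(
                project_id=project_id, domain_id=domain_id,
                trust_id=trust_id, federated_info=federated_info,
                access_token_id=access_token_id):
            return payload_class

For example, passing a trust_id selects TrustScopedPayload (version 3), while passing no scope arguments at all falls through to UnscopedPayload (version 0).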
diff --git a/keystone-moon/keystone/token/providers/fernet/utils.py b/keystone-moon/keystone/token/providers/fernet/utils.py
index 4235eda8..1c3552d4 100644
--- a/keystone-moon/keystone/token/providers/fernet/utils.py
+++ b/keystone-moon/keystone/token/providers/fernet/utils.py
@@ -25,29 +25,33 @@ LOG = log.getLogger(__name__)
CONF = cfg.CONF
-def validate_key_repository():
+def validate_key_repository(requires_write=False):
"""Validate permissions on the key repository directory."""
# NOTE(lbragstad): We shouldn't need to check if the directory was passed
# in as None because we don't set allow_no_values to True.
- # ensure current user has full access to the key repository
- if (not os.access(CONF.fernet_tokens.key_repository, os.R_OK) or not
- os.access(CONF.fernet_tokens.key_repository, os.W_OK) or not
- os.access(CONF.fernet_tokens.key_repository, os.X_OK)):
+ # ensure current user has sufficient access to the key repository
+ is_valid = (os.access(CONF.fernet_tokens.key_repository, os.R_OK) and
+ os.access(CONF.fernet_tokens.key_repository, os.X_OK))
+ if requires_write:
+ is_valid = (is_valid and
+ os.access(CONF.fernet_tokens.key_repository, os.W_OK))
+
+ if not is_valid:
LOG.error(
_LE('Either [fernet_tokens] key_repository does not exist or '
'Keystone does not have sufficient permission to access it: '
'%s'), CONF.fernet_tokens.key_repository)
- return False
-
- # ensure the key repository isn't world-readable
- stat_info = os.stat(CONF.fernet_tokens.key_repository)
- if stat_info.st_mode & stat.S_IROTH or stat_info.st_mode & stat.S_IXOTH:
- LOG.warning(_LW(
- '[fernet_tokens] key_repository is world readable: %s'),
- CONF.fernet_tokens.key_repository)
+ else:
+ # ensure the key repository isn't world-readable
+ stat_info = os.stat(CONF.fernet_tokens.key_repository)
+ if(stat_info.st_mode & stat.S_IROTH or
+ stat_info.st_mode & stat.S_IXOTH):
+ LOG.warning(_LW(
+ '[fernet_tokens] key_repository is world readable: %s'),
+ CONF.fernet_tokens.key_repository)
- return True
+ return is_valid
def _convert_to_integers(id_value):
@@ -99,7 +103,7 @@ def _create_new_key(keystone_user_id, keystone_group_id):
Create a new key that is readable by the Keystone group and Keystone user.
"""
- key = fernet.Fernet.generate_key()
+ key = fernet.Fernet.generate_key() # key is bytes
# This ensures the key created is not world-readable
old_umask = os.umask(0o177)
@@ -117,7 +121,7 @@ def _create_new_key(keystone_user_id, keystone_group_id):
key_file = os.path.join(CONF.fernet_tokens.key_repository, '0')
try:
with open(key_file, 'w') as f:
- f.write(key)
+ f.write(key.decode('utf-8')) # convert key to str for the file.
finally:
# After writing the key, set the umask back to its original value. Do
# the same with group and user identifiers if a Keystone group or user
@@ -176,7 +180,7 @@ def rotate_keys(keystone_user_id=None, keystone_group_id=None):
if os.path.isfile(path):
try:
key_id = int(filename)
- except ValueError:
+ except ValueError: # nosec : name isn't a number, ignore the file.
pass
else:
key_files[key_id] = path
@@ -243,7 +247,8 @@ def load_keys():
with open(path, 'r') as key_file:
try:
key_id = int(filename)
- except ValueError:
+ except ValueError: # nosec : filename isn't a number, ignore
+ # this file since it's not a key.
pass
else:
keys[key_id] = key_file.read()
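A simplified, self-contained sketch of the permission checks that validate_key_repository() performs above; logging and configuration lookup are omitted, and the function name key_repository_is_usable is hypothetical.

import os
import stat


def key_repository_is_usable(path, requires_write=False):
    # Keystone needs to read and traverse the key repository; write access
    # is only required when creating or rotating keys.
    usable = os.access(path, os.R_OK) and os.access(path, os.X_OK)
    if requires_write:
        usable = usable and os.access(path, os.W_OK)
    if usable:
        # Warn if the repository is readable or traversable by "other".
        mode = os.stat(path).st_mode
        if mode & (stat.S_IROTH | stat.S_IXOTH):
            print('warning: key repository is world readable: %s' % path)
    return usable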
diff --git a/keystone-moon/keystone/token/providers/pki.py b/keystone-moon/keystone/token/providers/pki.py
index af8dc739..6a5a2999 100644
--- a/keystone-moon/keystone/token/providers/pki.py
+++ b/keystone-moon/keystone/token/providers/pki.py
@@ -17,6 +17,7 @@
from keystoneclient.common import cms
from oslo_config import cfg
from oslo_log import log
+from oslo_log import versionutils
from oslo_serialization import jsonutils
from keystone.common import environment
@@ -31,6 +32,10 @@ CONF = cfg.CONF
LOG = log.getLogger(__name__)
+@versionutils.deprecated(
+ as_of=versionutils.deprecated.MITAKA,
+ what='the PKI token provider',
+ in_favor_of='the Fernet or UUID token providers')
class Provider(common.BaseProvider):
def _get_token_id(self, token_data):
try:
diff --git a/keystone-moon/keystone/token/providers/pkiz.py b/keystone-moon/keystone/token/providers/pkiz.py
index b4e31918..3e78d2e4 100644
--- a/keystone-moon/keystone/token/providers/pkiz.py
+++ b/keystone-moon/keystone/token/providers/pkiz.py
@@ -15,6 +15,7 @@
from keystoneclient.common import cms
from oslo_config import cfg
from oslo_log import log
+from oslo_log import versionutils
from oslo_serialization import jsonutils
from keystone.common import environment
@@ -30,6 +31,10 @@ LOG = log.getLogger(__name__)
ERROR_MESSAGE = _('Unable to sign token.')
+@versionutils.deprecated(
+ as_of=versionutils.deprecated.MITAKA,
+ what='the PKIZ token provider',
+ in_favor_of='the Fernet or UUID token providers')
class Provider(common.BaseProvider):
def _get_token_id(self, token_data):
try:
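The @versionutils.deprecated decorator added to both PKI providers above follows the standard oslo pattern; a hedged sketch of the same usage on a hypothetical provider class (ExampleProvider is illustrative only, not part of the patch).

from oslo_log import versionutils


@versionutils.deprecated(
    as_of=versionutils.deprecated.MITAKA,
    what='the example token provider',
    in_favor_of='the Fernet or UUID token providers')
class ExampleProvider(object):
    """Constructing this class logs a deprecation warning, as with PKI/PKIz."""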