aboutsummaryrefslogtreecommitdiffstats
path: root/keystonemiddleware-moon/keystonemiddleware
diff options
context:
space:
mode:
authorWuKong <rebirthmonkey@gmail.com>2015-07-01 08:54:55 +0200
committerWuKong <rebirthmonkey@gmail.com>2015-07-01 08:54:55 +0200
commit03bf0c32a0c656d4b91bebedc87a005e6d7563bb (patch)
tree7ab486ea98c8255bd28b345e9fd5b54d1b31c802 /keystonemiddleware-moon/keystonemiddleware
parent53d12675bc07feb552492df2d01fcd298167c363 (diff)
migrate openstack hook to opnfv
Change-Id: I1e828dae38820fdff93966e57691b344af01140f Signed-off-by: WuKong <rebirthmonkey@gmail.com>
Diffstat (limited to 'keystonemiddleware-moon/keystonemiddleware')
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/__init__.py0
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/audit.py430
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/__init__.py1171
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_auth.py181
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_base.py13
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_cache.py367
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_exceptions.py27
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_identity.py243
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_crypt.py210
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_pool.py184
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_revocations.py106
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_signing_dir.py83
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_user_plugin.py169
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/auth_token/_utils.py32
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/authz.py326
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/ec2_token.py130
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/i18n.py37
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/openstack/__init__.py0
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/openstack/common/__init__.py0
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/openstack/common/memorycache.py97
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/opts.py52
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/s3_token.py267
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/__init__.py0
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/__init__.py0
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/__init__.py0
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth.py102
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py2763
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py118
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_memcache_crypt.py97
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_revocations.py65
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_signing_dir.py138
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_utils.py37
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/client_fixtures.py452
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/test_audit_middleware.py485
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/test_opts.py85
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/test_s3_token_middleware.py235
-rw-r--r--keystonemiddleware-moon/keystonemiddleware/tests/unit/utils.py138
37 files changed, 8840 insertions, 0 deletions
diff --git a/keystonemiddleware-moon/keystonemiddleware/__init__.py b/keystonemiddleware-moon/keystonemiddleware/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/__init__.py
diff --git a/keystonemiddleware-moon/keystonemiddleware/audit.py b/keystonemiddleware-moon/keystonemiddleware/audit.py
new file mode 100644
index 00000000..f44da80d
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/audit.py
@@ -0,0 +1,430 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Build open standard audit information based on incoming requests
+
+AuditMiddleware filter should be placed after keystonemiddleware.auth_token
+in the pipeline so that it can utilise the information the Identity server
+provides.
+"""
+
+import ast
+import collections
+import functools
+import logging
+import os.path
+import re
+import sys
+
+from oslo_config import cfg
+from oslo_context import context
+try:
+ import oslo.messaging
+ messaging = True
+except ImportError:
+ messaging = False
+from pycadf import cadftaxonomy as taxonomy
+from pycadf import cadftype
+from pycadf import credential
+from pycadf import endpoint
+from pycadf import eventfactory as factory
+from pycadf import host
+from pycadf import identifier
+from pycadf import reason
+from pycadf import reporterstep
+from pycadf import resource
+from pycadf import tag
+from pycadf import timestamp
+from six.moves import configparser
+from six.moves.urllib import parse as urlparse
+import webob.dec
+
+from keystonemiddleware.i18n import _LE, _LI
+
+
+_LOG = None
+
+
+def _log_and_ignore_error(fn):
+    """Decorator: log any exception raised by *fn* and swallow it.
+
+    Used so that failures in audit-event processing can never break the
+    wrapped request pipeline; on error the wrapper returns None.
+    """
+    @functools.wraps(fn)
+    def wrapper(*args, **kwargs):
+        try:
+            return fn(*args, **kwargs)
+        except Exception as e:
+            # NOTE(review): relies on the module-global _LOG, which is only
+            # bound in AuditMiddleware.__init__; a decorated call before
+            # middleware construction would fail on _LOG being None.
+            _LOG.exception(_LE('An exception occurred processing '
+                               'the API call: %s '), e)
+    return wrapper
+
+
+# Immutable record describing one resolved service-catalog entry: its CADF
+# taxonomy type plus the three pycadf Endpoint objects (admin/public/private).
+Service = collections.namedtuple('Service',
+                                 ['id', 'name', 'type', 'admin_endp',
+                                  'public_endp', 'private_endp'])
+
+
+# Parsed audit-map configuration: URL path keywords, custom action overrides,
+# service-endpoint-type -> taxonomy mapping, and an optional fallback
+# endpoint type used when a request URL matches no catalog entry.
+AuditMap = collections.namedtuple('AuditMap',
+                                  ['path_kw',
+                                   'custom_actions',
+                                   'service_endpoints',
+                                   'default_target_endpoint_type'])
+
+
+class OpenStackAuditApi(object):
+    """Translate OpenStack API requests into CADF audit data.
+
+    Derives the CADF action from a request's method and URL path, and the
+    CADF target resource from the service catalog, driven by an optional
+    audit-map configuration file.
+    """
+
+    def __init__(self, cfg_file):
+        """Configure to recognize and map known api paths.
+
+        :param cfg_file: path to an audit-map INI file, or a falsy value to
+            run with empty mappings. Recognized sections (all optional):
+            [DEFAULT] target_endpoint_type, [custom_actions],
+            [path_keywords], [service_endpoints].
+        :raises PycadfAuditApiConfigError: if the file cannot be parsed.
+        """
+        path_kw = {}
+        custom_actions = {}
+        endpoints = {}
+        default_target_endpoint_type = None
+
+        if cfg_file:
+            try:
+                # NOTE(review): SafeConfigParser is deprecated on Python 3
+                # (ConfigParser should be used), and the file object opened
+                # here is never explicitly closed.
+                map_conf = configparser.SafeConfigParser()
+                map_conf.readfp(open(cfg_file))
+
+                try:
+                    default_target_endpoint_type = map_conf.get(
+                        'DEFAULT', 'target_endpoint_type')
+                except configparser.NoOptionError:
+                    # Option is optional; keep the None default.
+                    pass
+
+                try:
+                    custom_actions = dict(map_conf.items('custom_actions'))
+                except configparser.Error:
+                    # Section is optional; keep the empty default.
+                    pass
+
+                try:
+                    path_kw = dict(map_conf.items('path_keywords'))
+                except configparser.Error:
+                    pass
+
+                try:
+                    endpoints = dict(map_conf.items('service_endpoints'))
+                except configparser.Error:
+                    pass
+            except configparser.ParsingError as err:
+                raise PycadfAuditApiConfigError(
+                    'Error parsing audit map file: %s' % err)
+        self._MAP = AuditMap(
+            path_kw=path_kw, custom_actions=custom_actions,
+            service_endpoints=endpoints,
+            default_target_endpoint_type=default_target_endpoint_type)
+
+    @staticmethod
+    def _clean_path(value):
+        """Clean path if path has json suffix."""
+        return value[:-5] if value.endswith('.json') else value
+
+    def get_action(self, req):
+        """Take a given Request, parse url path to calculate action type.
+
+        Depending on req.method:
+        if POST: path ends with 'action', read the body and use as action;
+                 path ends with known custom_action, take action from config;
+                 request ends with known path, assume is create action;
+                 request ends with unknown path, assume is update action.
+        if GET: request ends with known path, assume is list action;
+                request ends with unknown path, assume is read action.
+        if PUT, assume update action.
+        if DELETE, assume delete action.
+        if HEAD, assume read action.
+
+        """
+        # Normalize: drop a trailing '/' and a '.json' suffix before
+        # matching on the last path segment.
+        path = req.path[:-1] if req.path.endswith('/') else req.path
+        url_ending = self._clean_path(path[path.rfind('/') + 1:])
+        method = req.method
+
+        # Config-supplied custom actions take precedence over the
+        # method-based rules; '<segment>/<method>' is tried before the
+        # bare '<segment>' key.
+        if url_ending + '/' + method.lower() in self._MAP.custom_actions:
+            action = self._MAP.custom_actions[url_ending + '/' +
+                                              method.lower()]
+        elif url_ending in self._MAP.custom_actions:
+            action = self._MAP.custom_actions[url_ending]
+        elif method == 'POST':
+            if url_ending == 'action':
+                try:
+                    if req.json:
+                        # Body like {"reboot": ...} -> 'update/reboot'.
+                        body_action = list(req.json.keys())[0]
+                        action = taxonomy.ACTION_UPDATE + '/' + body_action
+                    else:
+                        action = taxonomy.ACTION_CREATE
+                except ValueError:
+                    # Unparseable JSON body; fall back to a plain create.
+                    action = taxonomy.ACTION_CREATE
+            elif url_ending not in self._MAP.path_kw:
+                action = taxonomy.ACTION_UPDATE
+            else:
+                action = taxonomy.ACTION_CREATE
+        elif method == 'GET':
+            if url_ending in self._MAP.path_kw:
+                action = taxonomy.ACTION_LIST
+            else:
+                action = taxonomy.ACTION_READ
+        elif method == 'PUT' or method == 'PATCH':
+            action = taxonomy.ACTION_UPDATE
+        elif method == 'DELETE':
+            action = taxonomy.ACTION_DELETE
+        elif method == 'HEAD':
+            action = taxonomy.ACTION_READ
+        else:
+            action = taxonomy.UNKNOWN
+
+        return action
+
+    def _get_service_info(self, endp):
+        """Build a Service record from one service-catalog entry.
+
+        Only the first endpoint of the entry is used; the taxonomy type is
+        looked up in the [service_endpoints] mapping and defaults to
+        UNKNOWN.
+        """
+        service = Service(
+            type=self._MAP.service_endpoints.get(
+                endp['type'],
+                taxonomy.UNKNOWN),
+            name=endp['name'],
+            id=identifier.norm_ns(endp['endpoints'][0].get('id',
+                                                           endp['name'])),
+            admin_endp=endpoint.Endpoint(
+                name='admin',
+                url=endp['endpoints'][0]['adminURL']),
+            private_endp=endpoint.Endpoint(
+                name='private',
+                url=endp['endpoints'][0]['internalURL']),
+            public_endp=endpoint.Endpoint(
+                name='public',
+                url=endp['endpoints'][0]['publicURL']))
+
+        return service
+
+    def _build_typeURI(self, req, service_type):
+        """Build typeURI of target.
+
+        Combines service type and corresponding path for greater detail.
+        """
+        type_uri = ''
+        prev_key = None
+        for key in re.split('/', req.path):
+            key = self._clean_path(key)
+            if key in self._MAP.path_kw:
+                type_uri += '/' + key
+            elif prev_key in self._MAP.path_kw:
+                # A non-keyword segment (e.g. a resource id) that follows a
+                # keyword is replaced by the keyword's configured value.
+                type_uri += '/' + self._MAP.path_kw[prev_key]
+            prev_key = key
+        return service_type + type_uri
+
+    def _build_target(self, req, service):
+        """Build target resource."""
+        # Only build a detailed typeURI when the service type is known.
+        target_typeURI = (
+            self._build_typeURI(req, service.type)
+            if service.type != taxonomy.UNKNOWN else service.type)
+        target = resource.Resource(typeURI=target_typeURI,
+                                   id=service.id, name=service.name)
+        if service.admin_endp:
+            target.add_address(service.admin_endp)
+        if service.private_endp:
+            target.add_address(service.private_endp)
+        if service.public_endp:
+            target.add_address(service.public_endp)
+        return target
+
+    def get_target_resource(self, req):
+        """Retrieve target information
+
+        If discovery is enabled, target will attempt to retrieve information
+        from service catalog. If not, the information will be taken from
+        given config file.
+        """
+        # NOTE(review): despite the docstring, there is no discovery toggle
+        # here -- the service-catalog header is always required; the config
+        # file only supplies a default endpoint type. Confirm intent.
+        service_info = Service(type=taxonomy.UNKNOWN, name=taxonomy.UNKNOWN,
+                               id=taxonomy.UNKNOWN, admin_endp=None,
+                               private_endp=None, public_endp=None)
+        try:
+            # The catalog header holds a Python-literal string; parse it
+            # safely without eval.
+            catalog = ast.literal_eval(
+                req.environ['HTTP_X_SERVICE_CATALOG'])
+        except KeyError:
+            raise PycadfAuditApiConfigError(
+                'Service catalog is missing. '
+                'Cannot discover target information')
+
+        default_endpoint = None
+        for endp in catalog:
+            admin_urlparse = urlparse.urlparse(
+                endp['endpoints'][0]['adminURL'])
+            public_urlparse = urlparse.urlparse(
+                endp['endpoints'][0]['publicURL'])
+            req_url = urlparse.urlparse(req.host_url)
+            if (req_url.netloc == admin_urlparse.netloc
+                    or req_url.netloc == public_urlparse.netloc):
+                service_info = self._get_service_info(endp)
+                break
+            elif (self._MAP.default_target_endpoint_type and
+                  endp['type'] == self._MAP.default_target_endpoint_type):
+                default_endpoint = endp
+        else:
+            # for-else: no catalog entry matched the request's host, so
+            # fall back to the configured default endpoint, if any.
+            if default_endpoint:
+                service_info = self._get_service_info(default_endpoint)
+        return self._build_target(req, service_info)
+
+
+class ClientResource(resource.Resource):
+    """CADF resource that can optionally carry the caller's project id."""
+
+    def __init__(self, project_id=None, **kwargs):
+        super(ClientResource, self).__init__(**kwargs)
+        # Only set the attribute when provided, so the serialized event
+        # omits it otherwise.
+        if project_id is not None:
+            self.project_id = project_id
+
+
+class KeystoneCredential(credential.Credential):
+    """CADF credential extended with Keystone's identity status."""
+
+    def __init__(self, identity_status=None, **kwargs):
+        super(KeystoneCredential, self).__init__(**kwargs)
+        # Only set the attribute when provided, so the serialized event
+        # omits it otherwise.
+        if identity_status is not None:
+            self.identity_status = identity_status
+
+
+class PycadfAuditApiConfigError(Exception):
+    """Error raised when pyCADF fails to configure correctly."""
+
+
+class AuditMiddleware(object):
+    """Create an audit event based on request/response.
+
+    The audit middleware takes in various configuration options such as the
+    ability to skip audit of certain requests. The full list of options can
+    be discovered here:
+    http://docs.openstack.org/developer/keystonemiddleware/audit.html
+    """
+
+    @staticmethod
+    def _get_aliases(proj):
+        """Return oslo.messaging transport aliases for project *proj*.
+
+        Maps historical RPC driver module paths to the short transport
+        names ('rabbit', 'qpid', 'zmq') for backward compatibility.
+        """
+        aliases = {}
+        if proj:
+            # Aliases to support backward compatibility
+            aliases = {
+                '%s.openstack.common.rpc.impl_kombu' % proj: 'rabbit',
+                '%s.openstack.common.rpc.impl_qpid' % proj: 'qpid',
+                '%s.openstack.common.rpc.impl_zmq' % proj: 'zmq',
+                '%s.rpc.impl_kombu' % proj: 'rabbit',
+                '%s.rpc.impl_qpid' % proj: 'qpid',
+                '%s.rpc.impl_zmq' % proj: 'zmq',
+            }
+        return aliases
+
+    def __init__(self, app, **conf):
+        """Wrap *app* as WSGI middleware.
+
+        Recognized conf keys: log_name, service_name, ignore_req_list
+        (comma-separated HTTP methods to skip) and audit_map_file.
+        """
+        self._application = app
+        # Rebind the module-level logger so the _log_and_ignore_error
+        # decorator logs under the configured name.
+        global _LOG
+        _LOG = logging.getLogger(conf.get('log_name', __name__))
+        self._service_name = conf.get('service_name')
+        self._ignore_req_list = [x.upper().strip() for x in
+                                 conf.get('ignore_req_list', '').split(',')]
+        self._cadf_audit = OpenStackAuditApi(conf.get('audit_map_file'))
+
+        transport_aliases = self._get_aliases(cfg.CONF.project)
+        if messaging:
+            # Publish notifications as the running executable's basename.
+            self._notifier = oslo.messaging.Notifier(
+                oslo.messaging.get_transport(cfg.CONF,
+                                             aliases=transport_aliases),
+                os.path.basename(sys.argv[0]))
+
+    def _emit_audit(self, context, event_type, payload):
+        """Emit audit notification
+
+        if oslo.messaging enabled, send notification. if not, log event.
+        """
+        if messaging:
+            self._notifier.info(context, event_type, payload)
+        else:
+            _LOG.info(_LI('Event type: %(event_type)s, Context: %(context)s, '
+                          'Payload: %(payload)s'), {'context': context,
+                                                    'event_type': event_type,
+                                                    'payload': payload})
+
+    def _create_event(self, req):
+        """Build the CADF event for *req* and cache it in the environ.
+
+        Reads the identity headers set by auth_token middleware
+        (X-User-Id, X-User-Name, X-Auth-Token, X-Identity-Status,
+        X-Project-Id); raises KeyError if they are absent.
+        """
+        correlation_id = identifier.generate_uuid()
+        action = self._cadf_audit.get_action(req)
+
+        initiator = ClientResource(
+            typeURI=taxonomy.ACCOUNT_USER,
+            id=identifier.norm_ns(str(req.environ['HTTP_X_USER_ID'])),
+            name=req.environ['HTTP_X_USER_NAME'],
+            host=host.Host(address=req.client_addr, agent=req.user_agent),
+            credential=KeystoneCredential(
+                token=req.environ['HTTP_X_AUTH_TOKEN'],
+                identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
+            project_id=identifier.norm_ns(req.environ['HTTP_X_PROJECT_ID']))
+        target = self._cadf_audit.get_target_resource(req)
+
+        event = factory.EventFactory().new_event(
+            eventType=cadftype.EVENTTYPE_ACTIVITY,
+            outcome=taxonomy.OUTCOME_PENDING,
+            action=action,
+            initiator=initiator,
+            target=target,
+            observer=resource.Resource(id='target'))
+        event.requestPath = req.path_qs
+        event.add_tag(tag.generate_name_value_tag('correlation_id',
+                                                  correlation_id))
+        # cache model in request to allow tracking of transitive steps.
+        req.environ['cadf_event'] = event
+        return event
+
+    @_log_and_ignore_error
+    def _process_request(self, request):
+        # Emit the 'pending' audit event before the request is handled.
+        event = self._create_event(request)
+
+        self._emit_audit(context.get_admin_context().to_dict(),
+                         'audit.http.request', event.as_dict())
+
+    @_log_and_ignore_error
+    def _process_response(self, request, response=None):
+        # NOTE(gordc): handle case where error processing request
+        if 'cadf_event' not in request.environ:
+            self._create_event(request)
+        event = request.environ['cadf_event']
+
+        if response:
+            # 2xx/3xx -> success; anything else -> failure.
+            if response.status_int >= 200 and response.status_int < 400:
+                result = taxonomy.OUTCOME_SUCCESS
+            else:
+                result = taxonomy.OUTCOME_FAILURE
+            event.reason = reason.Reason(
+                reasonType='HTTP', reasonCode=str(response.status_int))
+        else:
+            result = taxonomy.UNKNOWN
+
+        event.outcome = result
+        event.add_reporterstep(
+            reporterstep.Reporterstep(
+                role=cadftype.REPORTER_ROLE_MODIFIER,
+                reporter=resource.Resource(id='target'),
+                reporterTime=timestamp.get_utc_now()))
+
+        self._emit_audit(context.get_admin_context().to_dict(),
+                         'audit.http.response', event.as_dict())
+
+    @webob.dec.wsgify
+    def __call__(self, req):
+        # Skip auditing entirely for configured HTTP methods.
+        if req.method in self._ignore_req_list:
+            return req.get_response(self._application)
+
+        self._process_request(req)
+        try:
+            response = req.get_response(self._application)
+        except Exception:
+            # Still record a response event (outcome UNKNOWN) on failure,
+            # then let the exception propagate.
+            self._process_response(req)
+            raise
+        else:
+            self._process_response(req, response)
+        return response
+
+
+def filter_factory(global_conf, **local_conf):
+    """Returns a WSGI filter app for use with paste.deploy."""
+    # Local (filter-section) options override the global paste config.
+    conf = global_conf.copy()
+    conf.update(local_conf)
+
+    def audit_filter(app):
+        return AuditMiddleware(app, **conf)
+    return audit_filter
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/__init__.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/__init__.py
new file mode 100644
index 00000000..80539714
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/__init__.py
@@ -0,0 +1,1171 @@
+# Copyright 2010-2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Token-based Authentication Middleware
+
+This WSGI component:
+
+* Verifies that incoming client requests have valid tokens by validating
+ tokens with the auth service.
+* Rejects unauthenticated requests unless the auth_token middleware is in
+ 'delay_auth_decision' mode, which means the final decision is delegated to
+ the downstream WSGI component (usually the OpenStack service).
+* Collects and forwards identity information based on a valid token
+ such as user name, tenant, etc
+
+Refer to: http://docs.openstack.org/developer/keystonemiddleware/\
+middlewarearchitecture.html
+
+
+Echo test server
+----------------
+
+Run this module directly to start a protected echo service on port 8000::
+
+ $ python -m keystonemiddleware.auth_token
+
+When the ``auth_token`` module authenticates a request, the echo service
+will respond with all the environment variables presented to it by this
+module.
+
+
+Headers
+-------
+
+The auth_token middleware uses headers sent in by the client on the request
+and sets headers and environment variables for the downstream WSGI component.
+
+Coming in from initial call from client or customer
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+HTTP_X_AUTH_TOKEN
+ The client token being passed in.
+
+HTTP_X_SERVICE_TOKEN
+ A service token being passed in.
+
+Used for communication between components
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+WWW-Authenticate
+ HTTP header returned to a user indicating which endpoint to use
+ to retrieve a new token
+
+What auth_token adds to the request for use by the OpenStack service
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When using composite authentication (a user and service token are
+present) additional service headers relating to the service user
+will be added. They take the same form as the standard headers but add
+'_SERVICE_'. These headers will not exist in the environment if no
+service token is present.
+
+HTTP_X_IDENTITY_STATUS, HTTP_X_SERVICE_IDENTITY_STATUS
+ 'Confirmed' or 'Invalid'
+ The underlying service will only see a value of 'Invalid' if the Middleware
+ is configured to run in 'delay_auth_decision' mode. As with all such
+ headers, HTTP_X_SERVICE_IDENTITY_STATUS will only exist in the
+ environment if a service token is presented. This is different than
+ HTTP_X_IDENTITY_STATUS which is always set even if no user token is
+ presented. This allows the underlying service to determine if a
+ denial should use 401 or 403.
+
+HTTP_X_DOMAIN_ID, HTTP_X_SERVICE_DOMAIN_ID
+ Identity service managed unique identifier, string. Only present if
+ this is a domain-scoped v3 token.
+
+HTTP_X_DOMAIN_NAME, HTTP_X_SERVICE_DOMAIN_NAME
+ Unique domain name, string. Only present if this is a domain-scoped
+ v3 token.
+
+HTTP_X_PROJECT_ID, HTTP_X_SERVICE_PROJECT_ID
+ Identity service managed unique identifier, string. Only present if
+ this is a project-scoped v3 token, or a tenant-scoped v2 token.
+
+HTTP_X_PROJECT_NAME, HTTP_X_SERVICE_PROJECT_NAME
+ Project name, unique within owning domain, string. Only present if
+ this is a project-scoped v3 token, or a tenant-scoped v2 token.
+
+HTTP_X_PROJECT_DOMAIN_ID, HTTP_X_SERVICE_PROJECT_DOMAIN_ID
+ Identity service managed unique identifier of owning domain of
+ project, string. Only present if this is a project-scoped v3 token. If
+ this variable is set, this indicates that the PROJECT_NAME can only
+ be assumed to be unique within this domain.
+
+HTTP_X_PROJECT_DOMAIN_NAME, HTTP_X_SERVICE_PROJECT_DOMAIN_NAME
+ Name of owning domain of project, string. Only present if this is a
+ project-scoped v3 token. If this variable is set, this indicates that
+ the PROJECT_NAME can only be assumed to be unique within this domain.
+
+HTTP_X_USER_ID, HTTP_X_SERVICE_USER_ID
+ Identity-service managed unique identifier, string
+
+HTTP_X_USER_NAME, HTTP_X_SERVICE_USER_NAME
+ User identifier, unique within owning domain, string
+
+HTTP_X_USER_DOMAIN_ID, HTTP_X_SERVICE_USER_DOMAIN_ID
+ Identity service managed unique identifier of owning domain of
+ user, string. If this variable is set, this indicates that the USER_NAME
+ can only be assumed to be unique within this domain.
+
+HTTP_X_USER_DOMAIN_NAME, HTTP_X_SERVICE_USER_DOMAIN_NAME
+ Name of owning domain of user, string. If this variable is set, this
+ indicates that the USER_NAME can only be assumed to be unique within
+ this domain.
+
+HTTP_X_ROLES, HTTP_X_SERVICE_ROLES
+ Comma delimited list of case-sensitive role names
+
+HTTP_X_SERVICE_CATALOG
+ json encoded service catalog (optional).
+ For compatibility reasons this catalog will always be in the V2 catalog
+ format even if it is a v3 token.
+
+ Note: This is an exception in that it contains 'SERVICE' but relates to a
+ user token, not a service token. The existing user's
+ catalog can be very large; it was decided not to present a catalog
+ relating to the service token to avoid using more HTTP header space.
+
+HTTP_X_TENANT_ID
+ *Deprecated* in favor of HTTP_X_PROJECT_ID
+ Identity service managed unique identifier, string. For v3 tokens, this
+ will be set to the same value as HTTP_X_PROJECT_ID
+
+HTTP_X_TENANT_NAME
+ *Deprecated* in favor of HTTP_X_PROJECT_NAME
+ Project identifier, unique within owning domain, string. For v3 tokens,
+ this will be set to the same value as HTTP_X_PROJECT_NAME
+
+HTTP_X_TENANT
+ *Deprecated* in favor of HTTP_X_TENANT_ID and HTTP_X_TENANT_NAME
+ identity server-assigned unique identifier, string. For v3 tokens, this
+ will be set to the same value as HTTP_X_PROJECT_ID
+
+HTTP_X_USER
+ *Deprecated* in favor of HTTP_X_USER_ID and HTTP_X_USER_NAME
+ User name, unique within owning domain, string
+
+HTTP_X_ROLE
+ *Deprecated* in favor of HTTP_X_ROLES
+ Will contain the same values as HTTP_X_ROLES.
+
+Environment Variables
+^^^^^^^^^^^^^^^^^^^^^
+
+These variables are set in the request environment for use by the downstream
+WSGI component.
+
+keystone.token_info
+ Information about the token discovered in the process of validation. This
+ may include extended information returned by the token validation call, as
+ well as basic information about the tenant and user.
+
+keystone.token_auth
+ A keystoneclient auth plugin that may be used with a
+ :py:class:`keystoneclient.session.Session`. This plugin will load the
+ authentication data provided to auth_token middleware.
+
+
+Configuration
+-------------
+
+Middleware configuration can be in the main application's configuration file,
+e.g. in ``nova.conf``:
+
+.. code-block:: ini
+
+ [keystone_authtoken]
+ auth_plugin = password
+ auth_url = http://keystone:35357/
+ username = nova
+ user_domain_id = default
+ password = whyarewestillusingpasswords
+ project_name = service
+ project_domain_id = default
+
+Configuration can also be in the ``api-paste.ini`` file with the same options,
+but this is discouraged.
+
+Swift
+-----
+
+When deploying Keystone auth_token middleware with Swift, a user may elect to
+Swift memcache instead of the local auth_token memcache. Swift memcache is
+passed in from the request environment and it's identified by the
+``swift.cache`` key. However it could be different, depending on deployment. To
+use Swift memcache, you must set the ``cache`` option to the environment key
+where the Swift cache object is stored.
+
+"""
+
+import datetime
+import logging
+
+from keystoneclient import access
+from keystoneclient import adapter
+from keystoneclient import auth
+from keystoneclient.common import cms
+from keystoneclient import discover
+from keystoneclient import exceptions
+from keystoneclient import session
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+import six
+
+from keystonemiddleware.auth_token import _auth
+from keystonemiddleware.auth_token import _base
+from keystonemiddleware.auth_token import _cache
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.auth_token import _identity
+from keystonemiddleware.auth_token import _revocations
+from keystonemiddleware.auth_token import _signing_dir
+from keystonemiddleware.auth_token import _user_plugin
+from keystonemiddleware.auth_token import _utils
+from keystonemiddleware.i18n import _, _LC, _LE, _LI, _LW
+
+
+# NOTE(jamielennox): A number of options below are deprecated however are left
+# in the list and only mentioned as deprecated in the help string. This is
+# because we have to provide the same deprecation functionality for arguments
+# passed in via the conf in __init__ (from paste) and there is no way to test
+# that the default value was set or not in CONF.
+# Also if we were to remove the options from the CONF list (as typical CONF
+# deprecation works) then other projects will not be able to override the
+# options via CONF.
+
+_OPTS = [
+ cfg.StrOpt('auth_uri',
+ default=None,
+ # FIXME(dolph): should be default='http://127.0.0.1:5000/v2.0/',
+ # or (depending on client support) an unversioned, publicly
+ # accessible identity endpoint (see bug 1207517)
+ help='Complete public Identity API endpoint.'),
+ cfg.StrOpt('auth_version',
+ default=None,
+ help='API version of the admin Identity API endpoint.'),
+ cfg.BoolOpt('delay_auth_decision',
+ default=False,
+ help='Do not handle authorization requests within the'
+ ' middleware, but delegate the authorization decision to'
+ ' downstream WSGI components.'),
+ cfg.IntOpt('http_connect_timeout',
+ default=None,
+ help='Request timeout value for communicating with Identity'
+ ' API server.'),
+ cfg.IntOpt('http_request_max_retries',
+ default=3,
+ help='How many times are we trying to reconnect when'
+ ' communicating with Identity API Server.'),
+ cfg.StrOpt('cache',
+ default=None,
+ help='Env key for the swift cache.'),
+ cfg.StrOpt('certfile',
+ help='Required if identity server requires client certificate'),
+ cfg.StrOpt('keyfile',
+ help='Required if identity server requires client certificate'),
+ cfg.StrOpt('cafile', default=None,
+ help='A PEM encoded Certificate Authority to use when '
+ 'verifying HTTPs connections. Defaults to system CAs.'),
+ cfg.BoolOpt('insecure', default=False, help='Verify HTTPS connections.'),
+ cfg.StrOpt('signing_dir',
+ help='Directory used to cache files related to PKI tokens.'),
+ cfg.ListOpt('memcached_servers',
+ deprecated_name='memcache_servers',
+ help='Optionally specify a list of memcached server(s) to'
+ ' use for caching. If left undefined, tokens will instead be'
+ ' cached in-process.'),
+ cfg.IntOpt('token_cache_time',
+ default=300,
+ help='In order to prevent excessive effort spent validating'
+ ' tokens, the middleware caches previously-seen tokens for a'
+ ' configurable duration (in seconds). Set to -1 to disable'
+ ' caching completely.'),
+ cfg.IntOpt('revocation_cache_time',
+ default=10,
+ help='Determines the frequency at which the list of revoked'
+ ' tokens is retrieved from the Identity service (in seconds). A'
+ ' high number of revocation events combined with a low cache'
+ ' duration may significantly reduce performance.'),
+ cfg.StrOpt('memcache_security_strategy',
+ default=None,
+ help='(Optional) If defined, indicate whether token data'
+ ' should be authenticated or authenticated and encrypted.'
+ ' Acceptable values are MAC or ENCRYPT. If MAC, token data is'
+ ' authenticated (with HMAC) in the cache. If ENCRYPT, token'
+ ' data is encrypted and authenticated in the cache. If the'
+ ' value is not one of these options or empty, auth_token will'
+ ' raise an exception on initialization.'),
+ cfg.StrOpt('memcache_secret_key',
+ default=None,
+ secret=True,
+ help='(Optional, mandatory if memcache_security_strategy is'
+ ' defined) This string is used for key derivation.'),
+ cfg.IntOpt('memcache_pool_dead_retry',
+ default=5 * 60,
+ help='(Optional) Number of seconds memcached server is'
+ ' considered dead before it is tried again.'),
+ cfg.IntOpt('memcache_pool_maxsize',
+ default=10,
+ help='(Optional) Maximum total number of open connections to'
+ ' every memcached server.'),
+ cfg.IntOpt('memcache_pool_socket_timeout',
+ default=3,
+ help='(Optional) Socket timeout in seconds for communicating '
+ 'with a memcache server.'),
+ cfg.IntOpt('memcache_pool_unused_timeout',
+ default=60,
+ help='(Optional) Number of seconds a connection to memcached'
+ ' is held unused in the pool before it is closed.'),
+ cfg.IntOpt('memcache_pool_conn_get_timeout',
+ default=10,
+ help='(Optional) Number of seconds that an operation will wait '
+ 'to get a memcache client connection from the pool.'),
+ cfg.BoolOpt('memcache_use_advanced_pool',
+ default=False,
+ help='(Optional) Use the advanced (eventlet safe) memcache '
+ 'client pool. The advanced pool will only work under '
+ 'python 2.x.'),
+ cfg.BoolOpt('include_service_catalog',
+ default=True,
+ help='(Optional) Indicate whether to set the X-Service-Catalog'
+ ' header. If False, middleware will not ask for service'
+ ' catalog on token validation and will not set the'
+ ' X-Service-Catalog header.'),
+ cfg.StrOpt('enforce_token_bind',
+ default='permissive',
+ help='Used to control the use and type of token binding. Can'
+ ' be set to: "disabled" to not check token binding.'
+ ' "permissive" (default) to validate binding information if the'
+ ' bind type is of a form known to the server and ignore it if'
+ ' not. "strict" like "permissive" but if the bind type is'
+ ' unknown the token will be rejected. "required" any form of'
+ ' token binding is needed to be allowed. Finally the name of a'
+ ' binding method that must be present in tokens.'),
+ cfg.BoolOpt('check_revocations_for_cached', default=False,
+ help='If true, the revocation list will be checked for cached'
+ ' tokens. This requires that PKI tokens are configured on the'
+ ' identity server.'),
+ cfg.ListOpt('hash_algorithms', default=['md5'],
+ help='Hash algorithms to use for hashing PKI tokens. This may'
+ ' be a single algorithm or multiple. The algorithms are those'
+ ' supported by Python standard hashlib.new(). The hashes will'
+ ' be tried in the order given, so put the preferred one first'
+ ' for performance. The result of the first hash will be stored'
+ ' in the cache. This will typically be set to multiple values'
+ ' only while migrating from a less secure algorithm to a more'
+ ' secure one. Once all the old tokens are expired this option'
+ ' should be set to a single value for better performance.'),
+]
+
# Register the auth_token options on the global config object so services
# can configure the middleware under the [keystone_authtoken] group.
CONF = cfg.CONF
CONF.register_opts(_OPTS, group=_base.AUTHTOKEN_GROUP)

# Module-level logger; AuthProtocol instances may use a differently named
# logger (see the 'log_name' handling in AuthProtocol.__init__).
_LOG = logging.getLogger(__name__)

# Maps header-name templates to AccessInfo attribute names.  The '%s' is
# filled with '' for user headers (e.g. 'X-Project-Id') and with
# '-Service' for service-token headers (e.g. 'X-Service-Project-Id').
_HEADER_TEMPLATE = {
    'X%s-Domain-Id': 'domain_id',
    'X%s-Domain-Name': 'domain_name',
    'X%s-Project-Id': 'project_id',
    'X%s-Project-Name': 'project_name',
    'X%s-Project-Domain-Id': 'project_domain_id',
    'X%s-Project-Domain-Name': 'project_domain_name',
    'X%s-User-Id': 'user_id',
    'X%s-User-Name': 'username',
    'X%s-User-Domain-Id': 'user_domain_id',
    'X%s-User-Domain-Name': 'user_domain_name',
}

# Older header names kept for backwards compatibility; populated from the
# same AccessInfo attributes as the modern names above.
_DEPRECATED_HEADER_TEMPLATE = {
    'X-User': 'username',
    'X-Tenant-Id': 'project_id',
    'X-Tenant-Name': 'project_name',
    'X-Tenant': 'project_name',
}
+
+
class _BIND_MODE(object):
    # Recognized values for the 'enforce_token_bind' option.  The first
    # four select a checking policy; any other configured value (such as
    # 'kerberos') names a specific bind type that must be present in the
    # token (see _confirm_token_bind).
    DISABLED = 'disabled'
    PERMISSIVE = 'permissive'
    STRICT = 'strict'
    REQUIRED = 'required'
    KERBEROS = 'kerberos'
+
+
+def _token_is_v2(token_info):
+ return ('access' in token_info)
+
+
+def _token_is_v3(token_info):
+ return ('token' in token_info)
+
+
+def _get_token_expiration(data):
+ if not data:
+ raise exc.InvalidToken(_('Token authorization failed'))
+ if _token_is_v2(data):
+ return data['access']['token']['expires']
+ elif _token_is_v3(data):
+ return data['token']['expires_at']
+ else:
+ raise exc.InvalidToken(_('Token authorization failed'))
+
+
def _confirm_token_not_expired(expires):
    """Raise InvalidToken when the ISO8601 *expires* stamp is in the past."""
    expiry = timeutils.normalize_time(timeutils.parse_isotime(expires))
    if timeutils.utcnow() >= expiry:
        raise exc.InvalidToken(_('Token authorization failed'))
+
+
+def _v3_to_v2_catalog(catalog):
+ """Convert a catalog to v2 format.
+
+ X_SERVICE_CATALOG must be specified in v2 format. If you get a token
+ that is in v3 convert it.
+ """
+ v2_services = []
+ for v3_service in catalog:
+ # first copy over the entries we allow for the service
+ v2_service = {'type': v3_service['type']}
+ try:
+ v2_service['name'] = v3_service['name']
+ except KeyError:
+ pass
+
+ # now convert the endpoints. Because in v3 we specify region per
+ # URL not per group we have to collect all the entries of the same
+ # region together before adding it to the new service.
+ regions = {}
+ for v3_endpoint in v3_service.get('endpoints', []):
+ region_name = v3_endpoint.get('region')
+ try:
+ region = regions[region_name]
+ except KeyError:
+ region = {'region': region_name} if region_name else {}
+ regions[region_name] = region
+
+ interface_name = v3_endpoint['interface'].lower() + 'URL'
+ region[interface_name] = v3_endpoint['url']
+
+ v2_service['endpoints'] = list(regions.values())
+ v2_services.append(v2_service)
+
+ return v2_services
+
+
def _conf_values_type_convert(conf):
    """Convert raw (string) conf values into their declared option types.

    Values arriving from a paste-deploy file are all strings; coerce each
    known option to its oslo.config type and map deprecated names onto the
    canonical destination.
    """
    if not conf:
        return {}

    # Map every known option name (including deprecated aliases) to its
    # (type, canonical destination) pair.
    opt_types = {}
    for opt in _OPTS + _auth.AuthTokenPlugin.get_options():
        pair = (getattr(opt, 'type', str), opt.dest)
        opt_types[opt.dest] = pair
        for alias in opt.deprecated_opts:
            opt_types[alias.name] = pair

    converted = {}
    for key, value in six.iteritems(conf):
        dest = key
        try:
            if value is not None:
                coerce, dest = opt_types[key]
                value = coerce(value)
        except KeyError:
            # This option is not known to auth_token; pass it through.
            pass
        except ValueError as e:
            raise exc.ConfigurationError(
                _('Unable to convert the value of %(key)s option into correct '
                  'type: %(ex)s') % {'key': key, 'ex': e})
        converted[dest] = value
    return converted
+
+
class AuthProtocol(object):
    """Middleware that handles authenticating client calls."""

    # File names used inside the signing directory for the certificates
    # fetched from the identity server (offline PKI token verification).
    _SIGNING_CERT_FILE_NAME = 'signing_cert.pem'
    _SIGNING_CA_FILE_NAME = 'cacert.pem'

    def __init__(self, app, conf):
        """Wrap *app* with token-validating middleware.

        :param app: downstream WSGI application (may be None when used
            standalone via app_factory)
        :param conf: configuration dict, typically from paste.deploy
        """
        self._LOG = logging.getLogger(conf.get('log_name', __name__))
        self._LOG.info(_LI('Starting Keystone auth_token middleware'))
        # NOTE(wanghong): If options are set in paste file, all the option
        # values passed into conf are string type. So, we should convert the
        # conf value into correct type.
        self._conf = _conf_values_type_convert(conf)
        self._app = app

        # delay_auth_decision means we still allow unauthenticated requests
        # through and we let the downstream service make the final decision
        self._delay_auth_decision = self._conf_get('delay_auth_decision')
        self._include_service_catalog = self._conf_get(
            'include_service_catalog')

        self._identity_server = self._create_identity_server()

        self._auth_uri = self._conf_get('auth_uri')
        if not self._auth_uri:
            self._LOG.warning(
                _LW('Configuring auth_uri to point to the public identity '
                    'endpoint is required; clients may not be able to '
                    'authenticate against an admin endpoint'))

            # FIXME(dolph): drop support for this fallback behavior as
            # documented in bug 1207517.

            self._auth_uri = self._identity_server.auth_uri

        # Local directory caching the signing cert/CA fetched from the
        # identity server.
        self._signing_directory = _signing_dir.SigningDirectory(
            directory_name=self._conf_get('signing_dir'), log=self._LOG)

        self._token_cache = self._token_cache_factory()

        # Revocation-list handling for PKI tokens; the list itself is
        # fetched lazily and cached for revocation_cache_time seconds.
        revocation_cache_timeout = datetime.timedelta(
            seconds=self._conf_get('revocation_cache_time'))
        self._revocations = _revocations.Revocations(revocation_cache_timeout,
                                                     self._signing_directory,
                                                     self._identity_server,
                                                     self._cms_verify,
                                                     self._LOG)

        self._check_revocations_for_cached = self._conf_get(
            'check_revocations_for_cached')
        self._init_auth_headers()
+
+ def _conf_get(self, name, group=_base.AUTHTOKEN_GROUP):
+ # try config from paste-deploy first
+ if name in self._conf:
+ return self._conf[name]
+ else:
+ return CONF[group][name]
+
+ def _call_app(self, env, start_response):
+ # NOTE(jamielennox): We wrap the given start response so that if an
+ # application with a 'delay_auth_decision' setting fails, or otherwise
+ # raises Unauthorized that we include the Authentication URL headers.
+ def _fake_start_response(status, response_headers, exc_info=None):
+ if status.startswith('401'):
+ response_headers.extend(self._reject_auth_headers)
+
+ return start_response(status, response_headers, exc_info)
+
+ return self._app(env, _fake_start_response)
+
    def __call__(self, env, start_response):
        """Handle incoming request.

        Authenticate send downstream on success. Reject request if
        we can't authenticate.

        """
        def _fmt_msg(env):
            # Summarize the validated user and service identities for the
            # debug log below.
            msg = ('user: user_id %s, project_id %s, roles %s '
                   'service: user_id %s, project_id %s, roles %s' % (
                       env.get('HTTP_X_USER_ID'), env.get('HTTP_X_PROJECT_ID'),
                       env.get('HTTP_X_ROLES'),
                       env.get('HTTP_X_SERVICE_USER_ID'),
                       env.get('HTTP_X_SERVICE_PROJECT_ID'),
                       env.get('HTTP_X_SERVICE_ROLES')))
            return msg

        self._token_cache.initialize(env)
        # Strip any client-supplied auth headers so they cannot be forged.
        self._remove_auth_headers(env)

        try:
            user_auth_ref = None
            serv_auth_ref = None

            try:
                self._LOG.debug('Authenticating user token')
                user_token = self._get_user_token_from_header(env)
                user_token_info = self._validate_token(user_token, env)
                user_auth_ref = access.AccessInfo.factory(
                    body=user_token_info,
                    auth_token=user_token)
                env['keystone.token_info'] = user_token_info
                user_headers = self._build_user_headers(user_auth_ref,
                                                        user_token_info)
                self._add_headers(env, user_headers)
            except exc.InvalidToken:
                if self._delay_auth_decision:
                    # Let the downstream service make the call: mark the
                    # identity invalid instead of rejecting outright.
                    self._LOG.info(
                        _LI('Invalid user token - deferring reject '
                            'downstream'))
                    self._add_headers(env, {'X-Identity-Status': 'Invalid'})
                else:
                    self._LOG.info(
                        _LI('Invalid user token - rejecting request'))
                    return self._reject_request(env, start_response)

            try:
                self._LOG.debug('Authenticating service token')
                # The service token (X-Service-Token) is optional.
                serv_token = self._get_service_token_from_header(env)
                if serv_token is not None:
                    serv_token_info = self._validate_token(
                        serv_token, env)
                    serv_auth_ref = access.AccessInfo.factory(
                        body=serv_token_info,
                        auth_token=serv_token)
                    serv_headers = self._build_service_headers(serv_token_info)
                    self._add_headers(env, serv_headers)
            except exc.InvalidToken:
                if self._delay_auth_decision:
                    self._LOG.info(
                        _LI('Invalid service token - deferring reject '
                            'downstream'))
                    self._add_headers(env,
                                      {'X-Service-Identity-Status': 'Invalid'})
                else:
                    self._LOG.info(
                        _LI('Invalid service token - rejecting request'))
                    return self._reject_request(env, start_response)

            # Expose an auth plugin representing the validated identities
            # so the downstream service can re-use them.
            env['keystone.token_auth'] = _user_plugin.UserAuthPlugin(
                user_auth_ref, serv_auth_ref)

        except exc.ServiceError as e:
            self._LOG.critical(_LC('Unable to obtain admin token: %s'), e)
            return self._do_503_error(env, start_response)

        self._LOG.debug("Received request from %s", _fmt_msg(env))

        return self._call_app(env, start_response)
+
+ def _do_503_error(self, env, start_response):
+ resp = _utils.MiniResp('Service unavailable', env)
+ start_response('503 Service Unavailable', resp.headers)
+ return resp.body
+
+ def _init_auth_headers(self):
+ """Initialize auth header list.
+
+ Both user and service token headers are generated.
+ """
+ auth_headers = ['X-Service-Catalog',
+ 'X-Identity-Status',
+ 'X-Service-Identity-Status',
+ 'X-Roles',
+ 'X-Service-Roles']
+ for key in six.iterkeys(_HEADER_TEMPLATE):
+ auth_headers.append(key % '')
+ # Service headers
+ auth_headers.append(key % '-Service')
+
+ # Deprecated headers
+ auth_headers.append('X-Role')
+ for key in six.iterkeys(_DEPRECATED_HEADER_TEMPLATE):
+ auth_headers.append(key)
+
+ self._auth_headers = auth_headers
+
    def _remove_auth_headers(self, env):
        """Remove headers so a user can't fake authentication.

        Both user and service token headers are removed.

        :param env: wsgi request environment

        """
        # self._auth_headers is computed once by _init_auth_headers.
        self._LOG.debug('Removing headers from request environment: %s',
                        ','.join(self._auth_headers))
        self._remove_headers(env, self._auth_headers)
+
+ def _get_user_token_from_header(self, env):
+ """Get token id from request.
+
+ :param env: wsgi request environment
+ :returns: token id
+ :raises exc.InvalidToken: if no token is provided in request
+
+ """
+ token = self._get_header(env, 'X-Auth-Token',
+ self._get_header(env, 'X-Storage-Token'))
+ if token:
+ return token
+ else:
+ if not self._delay_auth_decision:
+ self._LOG.warn(_LW('Unable to find authentication token'
+ ' in headers'))
+ self._LOG.debug('Headers: %s', env)
+ raise exc.InvalidToken(_('Unable to find token in headers'))
+
    def _get_service_token_from_header(self, env):
        """Get service token id from request.

        :param env: wsgi request environment
        :returns: service token id or None if not present

        """
        # Unlike the user token, a missing service token is not an error.
        return self._get_header(env, 'X-Service-Token')
+
+ @property
+ def _reject_auth_headers(self):
+ header_val = 'Keystone uri=\'%s\'' % self._auth_uri
+ return [('WWW-Authenticate', header_val)]
+
+ def _reject_request(self, env, start_response):
+ """Redirect client to auth server.
+
+ :param env: wsgi request environment
+ :param start_response: wsgi response callback
+ :returns: HTTPUnauthorized http response
+
+ """
+ resp = _utils.MiniResp('Authentication required',
+ env, self._reject_auth_headers)
+ start_response('401 Unauthorized', resp.headers)
+ return resp.body
+
    def _validate_token(self, token, env, retry=True):
        """Authenticate user token.

        Tries, in order: the token cache, offline PKI/PKIZ verification,
        and finally online validation against the identity server.

        :param token: token id
        :param env: wsgi environment
        :param retry: Ignored, as it is no longer relevant
        :returns: uncrypted body of the token if the token is valid
        :raises exc.InvalidToken: if token is rejected

        """
        token_id = None

        try:
            token_ids, cached = self._token_cache.get(token)
            token_id = token_ids[0]
            if cached:
                # Token was retrieved from the cache. In this case, there's no
                # need to check that the token is expired because the cache
                # fetch fails for an expired token. Also, there's no need to
                # put the token in the cache because it's already in the cache.

                data = cached

                if self._check_revocations_for_cached:
                    # A token stored in Memcached might have been revoked
                    # regardless of initial mechanism used to validate it,
                    # and needs to be checked.
                    self._revocations.check(token_ids)
                self._confirm_token_bind(data, env)
            else:
                verified = None
                # Token wasn't cached. In this case, the token needs to be
                # checked that it's not expired, and also put in the cache.
                try:
                    if cms.is_pkiz(token):
                        verified = self._verify_pkiz_token(token, token_ids)
                    elif cms.is_asn1_token(token):
                        verified = self._verify_signed_token(token, token_ids)
                except exceptions.CertificateConfigError:
                    self._LOG.warn(_LW('Fetch certificate config failed, '
                                       'fallback to online validation.'))
                except exc.RevocationListError:
                    self._LOG.warn(_LW('Fetch revocation list failed, '
                                       'fallback to online validation.'))

                if verified is not None:
                    # Offline (PKI) verification succeeded; expiry must be
                    # checked explicitly here.
                    data = jsonutils.loads(verified)
                    expires = _get_token_expiration(data)
                    _confirm_token_not_expired(expires)
                else:
                    data = self._identity_server.verify_token(token, retry)
                    # No need to confirm token expiration here since
                    # verify_token fails for expired tokens.
                    expires = _get_token_expiration(data)
                self._confirm_token_bind(data, env)
                self._token_cache.store(token_id, data, expires)
            return data
        except (exceptions.ConnectionRefused, exceptions.RequestTimeout):
            # Transient identity-server failure: do not cache anything.
            self._LOG.debug('Token validation failure.', exc_info=True)
            self._LOG.warn(_LW('Authorization failed for token'))
            raise exc.InvalidToken(_('Token authorization failed'))
        except exc.ServiceError:
            raise
        except Exception:
            # Any other failure marks the token invalid in the cache so
            # repeated requests with a bad token stay cheap.
            self._LOG.debug('Token validation failure.', exc_info=True)
            if token_id:
                self._token_cache.store_invalid(token_id)
            self._LOG.warn(_LW('Authorization failed for token'))
            raise exc.InvalidToken(_('Token authorization failed'))
+
+ def _build_user_headers(self, auth_ref, token_info):
+ """Convert token object into headers.
+
+ Build headers that represent authenticated user - see main
+ doc info at start of file for details of headers to be defined.
+
+ :param token_info: token object returned by identity
+ server on authentication
+ :raises exc.InvalidToken: when unable to parse token object
+
+ """
+ roles = ','.join(auth_ref.role_names)
+
+ if _token_is_v2(token_info) and not auth_ref.project_id:
+ raise exc.InvalidToken(_('Unable to determine tenancy.'))
+
+ rval = {
+ 'X-Identity-Status': 'Confirmed',
+ 'X-Roles': roles,
+ }
+
+ for header_tmplt, attr in six.iteritems(_HEADER_TEMPLATE):
+ rval[header_tmplt % ''] = getattr(auth_ref, attr)
+
+ # Deprecated headers
+ rval['X-Role'] = roles
+ for header_tmplt, attr in six.iteritems(_DEPRECATED_HEADER_TEMPLATE):
+ rval[header_tmplt] = getattr(auth_ref, attr)
+
+ if self._include_service_catalog and auth_ref.has_service_catalog():
+ catalog = auth_ref.service_catalog.get_data()
+ if _token_is_v3(token_info):
+ catalog = _v3_to_v2_catalog(catalog)
+ rval['X-Service-Catalog'] = jsonutils.dumps(catalog)
+
+ return rval
+
+ def _build_service_headers(self, token_info):
+ """Convert token object into service headers.
+
+ Build headers that represent authenticated user - see main
+ doc info at start of file for details of headers to be defined.
+
+ :param token_info: token object returned by identity
+ server on authentication
+ :raises exc.InvalidToken: when unable to parse token object
+
+ """
+ auth_ref = access.AccessInfo.factory(body=token_info)
+
+ if _token_is_v2(token_info) and not auth_ref.project_id:
+ raise exc.InvalidToken(_('Unable to determine service tenancy.'))
+
+ roles = ','.join(auth_ref.role_names)
+ rval = {
+ 'X-Service-Identity-Status': 'Confirmed',
+ 'X-Service-Roles': roles,
+ }
+
+ header_type = '-Service'
+ for header_tmplt, attr in six.iteritems(_HEADER_TEMPLATE):
+ rval[header_tmplt % header_type] = getattr(auth_ref, attr)
+
+ return rval
+
+ def _header_to_env_var(self, key):
+ """Convert header to wsgi env variable.
+
+ :param key: http header name (ex. 'X-Auth-Token')
+ :returns: wsgi env variable name (ex. 'HTTP_X_AUTH_TOKEN')
+
+ """
+ return 'HTTP_%s' % key.replace('-', '_').upper()
+
+ def _add_headers(self, env, headers):
+ """Add http headers to environment."""
+ for (k, v) in six.iteritems(headers):
+ env_key = self._header_to_env_var(k)
+ env[env_key] = v
+
+ def _remove_headers(self, env, keys):
+ """Remove http headers from environment."""
+ for k in keys:
+ env_key = self._header_to_env_var(k)
+ try:
+ del env[env_key]
+ except KeyError:
+ pass
+
+ def _get_header(self, env, key, default=None):
+ """Get http header from environment."""
+ env_key = self._header_to_env_var(key)
+ return env.get(env_key, default)
+
+ def _invalid_user_token(self, msg=False):
+ # NOTE(jamielennox): use False as the default so that None is valid
+ if msg is False:
+ msg = _('Token authorization failed')
+
+ raise exc.InvalidToken(msg)
+
    def _confirm_token_bind(self, data, env):
        """Check the token's bind information against the request.

        Behavior is governed by the 'enforce_token_bind' option; see
        _BIND_MODE for the recognized policies.

        :param data: token body (v2 or v3 layout)
        :param env: wsgi request environment
        :raises exc.InvalidToken: if the bind check fails
        """
        bind_mode = self._conf_get('enforce_token_bind')

        if bind_mode == _BIND_MODE.DISABLED:
            return

        try:
            if _token_is_v2(data):
                bind = data['access']['token']['bind']
            elif _token_is_v3(data):
                bind = data['token']['bind']
            else:
                self._invalid_user_token()
        except KeyError:
            # Token carries no bind information at all.
            bind = {}

        # permissive and strict modes don't require there to be a bind
        permissive = bind_mode in (_BIND_MODE.PERMISSIVE, _BIND_MODE.STRICT)

        if not bind:
            if permissive:
                # no bind provided and none required
                return
            else:
                self._LOG.info(_LI('No bind information present in token.'))
                self._invalid_user_token()

        # get the named mode if bind_mode is not one of the predefined
        if permissive or bind_mode == _BIND_MODE.REQUIRED:
            name = None
        else:
            name = bind_mode

        if name and name not in bind:
            self._LOG.info(_LI('Named bind mode %s not in bind information'),
                           name)
            self._invalid_user_token()

        for bind_type, identifier in six.iteritems(bind):
            if bind_type == _BIND_MODE.KERBEROS:
                # Kerberos bind: the request must have authenticated via
                # Negotiate and as the same principal named in the token.
                if not env.get('AUTH_TYPE', '').lower() == 'negotiate':
                    self._LOG.info(_LI('Kerberos credentials required and '
                                       'not present.'))
                    self._invalid_user_token()

                if not env.get('REMOTE_USER') == identifier:
                    self._LOG.info(_LI('Kerberos credentials do not match '
                                       'those in bind.'))
                    self._invalid_user_token()

                self._LOG.debug('Kerberos bind authentication successful.')

            elif bind_mode == _BIND_MODE.PERMISSIVE:
                # Unknown bind types are ignored in permissive mode only.
                self._LOG.debug('Ignoring Unknown bind for permissive mode: '
                                '%(bind_type)s: %(identifier)s.',
                                {'bind_type': bind_type,
                                 'identifier': identifier})

            else:
                self._LOG.info(
                    _LI('Couldn`t verify unknown bind: %(bind_type)s: '
                        '%(identifier)s.'),
                    {'bind_type': bind_type, 'identifier': identifier})
                self._invalid_user_token()
+
    def _cms_verify(self, data, inform=cms.PKI_ASN1_FORM):
        """Verify the signature of the provided data, IAW CMS syntax.

        If either of the certificate files might be missing, fetch them and
        retry.

        :param data: signed token payload to verify
        :param inform: CMS input format of *data*
        :returns: the verified payload decoded as UTF-8
        """
        def verify():
            try:
                signing_cert_path = self._signing_directory.calc_path(
                    self._SIGNING_CERT_FILE_NAME)
                signing_ca_path = self._signing_directory.calc_path(
                    self._SIGNING_CA_FILE_NAME)
                return cms.cms_verify(data, signing_cert_path,
                                      signing_ca_path,
                                      inform=inform).decode('utf-8')
            except cms.subprocess.CalledProcessError as err:
                self._LOG.warning(_LW('Verify error: %s'), err)
                raise

        try:
            return verify()
        except exceptions.CertificateConfigError:
            # the certs might be missing; unconditionally fetch to avoid racing
            self._fetch_signing_cert()
            self._fetch_ca_cert()

            try:
                # retry with certs in place
                return verify()
            except exceptions.CertificateConfigError as err:
                # if this is still occurring, something else is wrong and we
                # need err.output to identify the problem
                self._LOG.error(_LE('CMS Verify output: %s'), err.output)
                raise
+
+ def _verify_signed_token(self, signed_text, token_ids):
+ """Check that the token is unrevoked and has a valid signature."""
+ self._revocations.check(token_ids)
+ formatted = cms.token_to_cms(signed_text)
+ verified = self._cms_verify(formatted)
+ return verified
+
+ def _verify_pkiz_token(self, signed_text, token_ids):
+ self._revocations.check(token_ids)
+ try:
+ uncompressed = cms.pkiz_uncompress(signed_text)
+ verified = self._cms_verify(uncompressed, inform=cms.PKIZ_CMS_FORM)
+ return verified
+ # TypeError If the signed_text is not zlib compressed
+ except TypeError:
+ raise exc.InvalidToken(signed_text)
+
+ def _fetch_signing_cert(self):
+ self._signing_directory.write_file(
+ self._SIGNING_CERT_FILE_NAME,
+ self._identity_server.fetch_signing_cert())
+
+ def _fetch_ca_cert(self):
+ self._signing_directory.write_file(
+ self._SIGNING_CA_FILE_NAME,
+ self._identity_server.fetch_ca_cert())
+
    def _get_auth_plugin(self):
        """Load the auth plugin used to talk to the identity server.

        Falls back to the legacy AuthTokenPlugin when no 'auth_plugin' is
        configured.
        """
        # NOTE(jamielennox): Ideally this would use get_from_conf_options
        # however that is not possible because we have to support the override
        # pattern we use in _conf_get. There is a somewhat replacement for this
        # in keystoneclient in load_from_options_getter which should be used
        # when available. Until then this is essentially a copy and paste of
        # the ksc load_from_conf_options code because we need to get a fix out
        # for this quickly.

        # FIXME(jamielennox): update to use load_from_options_getter when
        # https://review.openstack.org/162529 merges.

        # !!! - UNDER NO CIRCUMSTANCES COPY ANY OF THIS CODE - !!!

        group = self._conf_get('auth_section') or _base.AUTHTOKEN_GROUP
        plugin_name = self._conf_get('auth_plugin', group=group)
        plugin_kwargs = dict()

        if plugin_name:
            plugin_class = auth.get_plugin_class(plugin_name)
        else:
            plugin_class = _auth.AuthTokenPlugin
            # logger object is a required parameter of the default plugin
            plugin_kwargs['log'] = self._LOG

        plugin_opts = plugin_class.get_options()
        CONF.register_opts(plugin_opts, group=group)

        # Collect each plugin option, coercing paste-supplied strings to
        # the option's declared type.
        for opt in plugin_opts:
            val = self._conf_get(opt.dest, group=group)
            if val is not None:
                val = opt.type(val)
            plugin_kwargs[opt.dest] = val

        return plugin_class.load_from_options(**plugin_kwargs)
+
+ def _create_identity_server(self):
+ # NOTE(jamielennox): Loading Session here should be exactly the
+ # same as calling Session.load_from_conf_options(CONF, GROUP)
+ # however we can't do that because we have to use _conf_get to
+ # support the paste.ini options.
+ sess = session.Session.construct(dict(
+ cert=self._conf_get('certfile'),
+ key=self._conf_get('keyfile'),
+ cacert=self._conf_get('cafile'),
+ insecure=self._conf_get('insecure'),
+ timeout=self._conf_get('http_connect_timeout')
+ ))
+
+ auth_plugin = self._get_auth_plugin()
+
+ adap = adapter.Adapter(
+ sess,
+ auth=auth_plugin,
+ service_type='identity',
+ interface='admin',
+ connect_retries=self._conf_get('http_request_max_retries'))
+
+ auth_version = self._conf_get('auth_version')
+ if auth_version is not None:
+ auth_version = discover.normalize_version_number(auth_version)
+ return _identity.IdentityServer(
+ self._LOG,
+ adap,
+ include_service_catalog=self._include_service_catalog,
+ requested_auth_version=auth_version)
+
+ def _token_cache_factory(self):
+ security_strategy = self._conf_get('memcache_security_strategy')
+
+ cache_kwargs = dict(
+ cache_time=int(self._conf_get('token_cache_time')),
+ hash_algorithms=self._conf_get('hash_algorithms'),
+ env_cache_name=self._conf_get('cache'),
+ memcached_servers=self._conf_get('memcached_servers'),
+ use_advanced_pool=self._conf_get('memcache_use_advanced_pool'),
+ memcache_pool_dead_retry=self._conf_get(
+ 'memcache_pool_dead_retry'),
+ memcache_pool_maxsize=self._conf_get('memcache_pool_maxsize'),
+ memcache_pool_unused_timeout=self._conf_get(
+ 'memcache_pool_unused_timeout'),
+ memcache_pool_conn_get_timeout=self._conf_get(
+ 'memcache_pool_conn_get_timeout'),
+ memcache_pool_socket_timeout=self._conf_get(
+ 'memcache_pool_socket_timeout'),
+ )
+
+ if security_strategy:
+ secret_key = self._conf_get('memcache_secret_key')
+ return _cache.SecureTokenCache(self._LOG,
+ security_strategy,
+ secret_key,
+ **cache_kwargs)
+ else:
+ return _cache.TokenCache(self._LOG, **cache_kwargs)
+
+
def filter_factory(global_conf, **local_conf):
    """Returns a WSGI filter app for use with paste.deploy."""
    # Local settings take precedence over the global paste configuration.
    conf = dict(global_conf)
    conf.update(local_conf)

    def auth_filter(app):
        return AuthProtocol(app, conf)

    return auth_filter
+
+
def app_factory(global_conf, **local_conf):
    """Return a standalone AuthProtocol WSGI app for paste.deploy."""
    # Local settings take precedence over the global paste configuration.
    conf = dict(global_conf)
    conf.update(local_conf)
    return AuthProtocol(None, conf)
+
+
# Ad-hoc manual test entry point: wrap a trivial echo application with the
# middleware and serve it on port 8000.
if __name__ == '__main__':
    def echo_app(environ, start_response):
        """A WSGI application that echoes the CGI environment to the user."""
        start_response('200 OK', [('Content-Type', 'application/json')])
        # Only the X-* headers injected by the middleware are of interest.
        environment = dict((k, v) for k, v in six.iteritems(environ)
                           if k.startswith('HTTP_X_'))
        yield jsonutils.dumps(environment)

    from wsgiref import simple_server

    # hardcode any non-default configuration here
    conf = {'auth_protocol': 'http', 'admin_token': 'ADMIN'}
    app = AuthProtocol(echo_app, conf)
    server = simple_server.make_server('', 8000, app)
    print('Serving on port 8000 (Ctrl+C to end)...')
    server.serve_forever()
+
+
# NOTE(jamielennox): Maintained here for public API compatibility.
# These exception classes were historically defined in this module; they
# now live in _exceptions but are re-exported so existing imports of
# keystonemiddleware.auth_token keep working.
InvalidToken = exc.InvalidToken
ServiceError = exc.ServiceError
ConfigurationError = exc.ConfigurationError
RevocationListError = exc.RevocationListError
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_auth.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_auth.py
new file mode 100644
index 00000000..acc32ca5
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_auth.py
@@ -0,0 +1,181 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+
+from keystoneclient import auth
+from keystoneclient.auth.identity import v2
+from keystoneclient.auth import token_endpoint
+from keystoneclient import discover
+from oslo_config import cfg
+
+from keystonemiddleware.auth_token import _base
+from keystonemiddleware.i18n import _, _LW
+
+
+_LOG = logging.getLogger(__name__)
+
+
class AuthTokenPlugin(auth.BaseAuthPlugin):
    """Legacy auth plugin driven by the historic auth_token options.

    Wraps either a token_endpoint.Token plugin (when admin_token is set)
    or a v2 Password plugin (admin credentials), and adds endpoint
    discovery against the configured identity_uri.
    """

    def __init__(self, auth_host, auth_port, auth_protocol, auth_admin_prefix,
                 admin_user, admin_password, admin_tenant_name, admin_token,
                 identity_uri, log):
        # NOTE(jamielennox): it does appear here that our default arguments
        # are backwards. We need to do it this way so that we can handle the
        # same deprecation strategy for CONF and the conf variable.
        if not identity_uri:
            log.warning(_LW('Configuring admin URI using auth fragments. '
                            'This is deprecated, use \'identity_uri\''
                            ' instead.'))

            if ':' in auth_host:
                # Note(dzyu) it is an IPv6 address, so it needs to be wrapped
                # with '[]' to generate a valid IPv6 URL, based on
                # http://www.ietf.org/rfc/rfc2732.txt
                auth_host = '[%s]' % auth_host

            identity_uri = '%s://%s:%s' % (auth_protocol,
                                           auth_host,
                                           auth_port)

            if auth_admin_prefix:
                identity_uri = '%s/%s' % (identity_uri,
                                          auth_admin_prefix.strip('/'))

        self._identity_uri = identity_uri.rstrip('/')

        # FIXME(jamielennox): Yes. This is wrong. We should be determining the
        # plugin to use based on a combination of discovery and inputs. Much
        # of this can be changed when we get keystoneclient 0.10. For now this
        # hardcoded path is EXACTLY the same as the original auth_token did.
        auth_url = '%s/v2.0' % self._identity_uri

        if admin_token:
            log.warning(_LW(
                "The admin_token option in the auth_token middleware is "
                "deprecated and should not be used. The admin_user and "
                "admin_password options should be used instead. The "
                "admin_token option may be removed in a future release."))
            self._plugin = token_endpoint.Token(auth_url, admin_token)
        else:
            self._plugin = v2.Password(auth_url,
                                       username=admin_user,
                                       password=admin_password,
                                       tenant_name=admin_tenant_name)

        self._LOG = log
        # Discover instance for version negotiation; created lazily in
        # get_endpoint.
        self._discover = None
+
    def get_token(self, *args, **kwargs):
        """Return a token string, delegating to the wrapped plugin."""
        return self._plugin.get_token(*args, **kwargs)
+
    def get_endpoint(self, session, interface=None, version=None, **kwargs):
        """Return an endpoint for the client.

        There are no required keyword arguments to ``get_endpoint`` as a plugin
        implementation should use best effort with the information available to
        determine the endpoint.

        :param session: The session object that the auth_plugin belongs to.
        :type session: keystoneclient.session.Session
        :param tuple version: The version number required for this endpoint.
        :param str interface: what visibility the endpoint should have.

        :returns: The base URL that will be used to talk to the required
                  service or None if not available.
        :rtype: string
        """
        if interface == auth.AUTH_INTERFACE:
            # The middleware asks for the raw auth endpoint itself.
            return self._identity_uri

        if not version:
            # NOTE(jamielennox): This plugin can only be used within auth_token
            # and auth_token will always provide version= with requests.
            return None

        if not self._discover:
            # Created once and reused for subsequent endpoint lookups.
            self._discover = discover.Discover(session,
                                               auth_url=self._identity_uri,
                                               authenticated=False)

        if not self._discover.url_for(version):
            # NOTE(jamielennox): The requested version is not supported by the
            # identity server.
            return None

        # NOTE(jamielennox): for backwards compatibility here we don't
        # actually use the URL from discovery we hack it up instead. :(
        if version[0] == 2:
            return '%s/v2.0' % self._identity_uri
        elif version[0] == 3:
            return '%s/v3' % self._identity_uri

        # NOTE(jamielennox): This plugin will only get called from auth_token
        # middleware. The middleware should never request a version that the
        # plugin doesn't know how to handle.
        msg = _('Invalid version asked for in auth_token plugin')
        raise NotImplementedError(msg)
+
    def invalidate(self):
        """Invalidate the wrapped plugin's cached token."""
        return self._plugin.invalidate()
+
    @classmethod
    def get_options(cls):
        """Return the conf options understood by this legacy plugin."""
        options = super(AuthTokenPlugin, cls).get_options()

        # These mirror the historic auth_token settings; most are
        # deprecated in favour of identity_uri / admin credentials.
        options.extend([
            cfg.StrOpt('auth_admin_prefix',
                       default='',
                       help='Prefix to prepend at the beginning of the path. '
                            'Deprecated, use identity_uri.'),
            cfg.StrOpt('auth_host',
                       default='127.0.0.1',
                       help='Host providing the admin Identity API endpoint. '
                            'Deprecated, use identity_uri.'),
            cfg.IntOpt('auth_port',
                       default=35357,
                       help='Port of the admin Identity API endpoint. '
                            'Deprecated, use identity_uri.'),
            cfg.StrOpt('auth_protocol',
                       default='https',
                       help='Protocol of the admin Identity API endpoint '
                            '(http or https). Deprecated, use identity_uri.'),
            cfg.StrOpt('identity_uri',
                       default=None,
                       help='Complete admin Identity API endpoint. This '
                            'should specify the unversioned root endpoint '
                            'e.g. https://localhost:35357/'),
            cfg.StrOpt('admin_token',
                       secret=True,
                       help='This option is deprecated and may be removed in '
                            'a future release. Single shared secret with the '
                            'Keystone configuration used for bootstrapping a '
                            'Keystone installation, or otherwise bypassing '
                            'the normal authentication process. This option '
                            'should not be used, use `admin_user` and '
                            '`admin_password` instead.'),
            cfg.StrOpt('admin_user',
                       help='Service username.'),
            cfg.StrOpt('admin_password',
                       secret=True,
                       help='Service user password.'),
            cfg.StrOpt('admin_tenant_name',
                       default='admin',
                       help='Service tenant name.'),
        ])

        return options
+
+
+auth.register_conf_options(cfg.CONF, _base.AUTHTOKEN_GROUP)
+AuthTokenPlugin.register_conf_options(cfg.CONF, _base.AUTHTOKEN_GROUP)
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_base.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_base.py
new file mode 100644
index 00000000..ee4ec13c
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_base.py
@@ -0,0 +1,13 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+AUTHTOKEN_GROUP = 'keystone_authtoken'
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_cache.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_cache.py
new file mode 100644
index 00000000..ae155776
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_cache.py
@@ -0,0 +1,367 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import contextlib
+
+from keystoneclient.common import cms
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+import six
+
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.auth_token import _memcache_crypt as memcache_crypt
+from keystonemiddleware.i18n import _, _LE
+from keystonemiddleware.openstack.common import memorycache
+
+
+class _CachePool(list):
+ """A lazy pool of cache references."""
+
+ def __init__(self, cache, memcached_servers):
+ self._environment_cache = cache
+ self._memcached_servers = memcached_servers
+
+ @contextlib.contextmanager
+ def reserve(self):
+ """Context manager to manage a pooled cache reference."""
+ if self._environment_cache is not None:
+ # skip pooling and just use the cache from the upstream filter
+ yield self._environment_cache
+ return # otherwise the context manager will continue!
+
+ try:
+ c = self.pop()
+ except IndexError:
+ # the pool is empty, so we need to create a new client
+ c = memorycache.get_client(self._memcached_servers)
+
+ try:
+ yield c
+ finally:
+ self.append(c)
+
+
+class _MemcacheClientPool(object):
+ """An advanced memcached client pool that is eventlet safe."""
+ def __init__(self, memcache_servers, memcache_dead_retry=None,
+ memcache_pool_maxsize=None, memcache_pool_unused_timeout=None,
+ memcache_pool_conn_get_timeout=None,
+ memcache_pool_socket_timeout=None):
+ # NOTE(morganfainberg): import here to avoid hard dependency on
+ # python-memcache library.
+ global _memcache_pool
+ from keystonemiddleware.auth_token import _memcache_pool
+
+ self._pool = _memcache_pool.MemcacheClientPool(
+ memcache_servers,
+ arguments={
+ 'dead_retry': memcache_dead_retry,
+ 'socket_timeout': memcache_pool_socket_timeout,
+ },
+ maxsize=memcache_pool_maxsize,
+ unused_timeout=memcache_pool_unused_timeout,
+ conn_get_timeout=memcache_pool_conn_get_timeout,
+ )
+
+ @contextlib.contextmanager
+ def reserve(self):
+ with self._pool.get() as client:
+ yield client
+
+
+class TokenCache(object):
+ """Encapsulates the auth_token token cache functionality.
+
+ auth_token caches tokens that it's seen so that when a token is re-used the
+ middleware doesn't have to do a more expensive operation (like going to the
+ identity server) to validate the token.
+
+ initialize() must be called before calling the other methods.
+
+ Store a valid token in the cache using store(); mark a token as invalid in
+ the cache using store_invalid().
+
+ Check if a token is in the cache and retrieve it using get().
+
+ """
+
+ _CACHE_KEY_TEMPLATE = 'tokens/%s'
+ _INVALID_INDICATOR = 'invalid'
+
+ def __init__(self, log, cache_time=None, hash_algorithms=None,
+ env_cache_name=None, memcached_servers=None,
+ use_advanced_pool=False, memcache_pool_dead_retry=None,
+ memcache_pool_maxsize=None, memcache_pool_unused_timeout=None,
+ memcache_pool_conn_get_timeout=None,
+ memcache_pool_socket_timeout=None):
+ self._LOG = log
+ self._cache_time = cache_time
+ self._hash_algorithms = hash_algorithms
+ self._env_cache_name = env_cache_name
+ self._memcached_servers = memcached_servers
+ self._use_advanced_pool = use_advanced_pool
+ self._memcache_pool_dead_retry = memcache_pool_dead_retry,
+ self._memcache_pool_maxsize = memcache_pool_maxsize,
+ self._memcache_pool_unused_timeout = memcache_pool_unused_timeout
+ self._memcache_pool_conn_get_timeout = memcache_pool_conn_get_timeout
+ self._memcache_pool_socket_timeout = memcache_pool_socket_timeout
+
+ self._cache_pool = None
+ self._initialized = False
+
+ def _get_cache_pool(self, cache, memcache_servers, use_advanced_pool=False,
+ memcache_dead_retry=None, memcache_pool_maxsize=None,
+ memcache_pool_unused_timeout=None,
+ memcache_pool_conn_get_timeout=None,
+ memcache_pool_socket_timeout=None):
+ if use_advanced_pool is True and memcache_servers and cache is None:
+ return _MemcacheClientPool(
+ memcache_servers,
+ memcache_dead_retry=memcache_dead_retry,
+ memcache_pool_maxsize=memcache_pool_maxsize,
+ memcache_pool_unused_timeout=memcache_pool_unused_timeout,
+ memcache_pool_conn_get_timeout=memcache_pool_conn_get_timeout,
+ memcache_pool_socket_timeout=memcache_pool_socket_timeout)
+ else:
+ return _CachePool(cache, memcache_servers)
+
+ def initialize(self, env):
+ if self._initialized:
+ return
+
+ self._cache_pool = self._get_cache_pool(
+ env.get(self._env_cache_name),
+ self._memcached_servers,
+ use_advanced_pool=self._use_advanced_pool,
+ memcache_dead_retry=self._memcache_pool_dead_retry,
+ memcache_pool_maxsize=self._memcache_pool_maxsize,
+ memcache_pool_unused_timeout=self._memcache_pool_unused_timeout,
+ memcache_pool_conn_get_timeout=self._memcache_pool_conn_get_timeout
+ )
+
+ self._initialized = True
+
+ def get(self, user_token):
+ """Check if the token is cached already.
+
+ Returns a tuple. The first element is a list of token IDs, where the
+ first one is the preferred hash.
+
+ The second element is the token data from the cache if the token was
+ cached, otherwise ``None``.
+
+ :raises exc.InvalidToken: if the token is invalid
+
+ """
+
+ if cms.is_asn1_token(user_token) or cms.is_pkiz(user_token):
+ # user_token is a PKI token that's not hashed.
+
+ token_hashes = list(cms.cms_hash_token(user_token, mode=algo)
+ for algo in self._hash_algorithms)
+
+ for token_hash in token_hashes:
+ cached = self._cache_get(token_hash)
+ if cached:
+ return (token_hashes, cached)
+
+ # The token wasn't found using any hash algorithm.
+ return (token_hashes, None)
+
+ # user_token is either a UUID token or a hashed PKI token.
+ token_id = user_token
+ cached = self._cache_get(token_id)
+ return ([token_id], cached)
+
+ def store(self, token_id, data, expires):
+ """Put token data into the cache.
+
+ Stores the parsed expire date in cache allowing
+ quick check of token freshness on retrieval.
+
+ """
+ self._LOG.debug('Storing token in cache')
+ self._cache_store(token_id, (data, expires))
+
+ def store_invalid(self, token_id):
+ """Store invalid token in cache."""
+ self._LOG.debug('Marking token as unauthorized in cache')
+ self._cache_store(token_id, self._INVALID_INDICATOR)
+
+ def _get_cache_key(self, token_id):
+ """Get a unique key for this token id.
+
+ Turn the token_id into something that can uniquely identify that token
+ in a key value store.
+
+ As this is generally the first function called in a key lookup this
+ function also returns a context object. This context object is not
+ modified or used by the Cache object but is passed back on subsequent
+ functions so that decryption or other data can be shared throughout a
+ cache lookup.
+
+ :param str token_id: The unique token id.
+
+ :returns: A tuple of a string key and an implementation specific
+ context object
+ """
+ # NOTE(jamielennox): in the basic implementation there is no need for
+ # a context so just pass None as it will only get passed back later.
+ unused_context = None
+ return self._CACHE_KEY_TEMPLATE % token_id, unused_context
+
+ def _deserialize(self, data, context):
+ """Deserialize data from the cache back into python objects.
+
+ Take data retrieved from the cache and return an appropriate python
+ dictionary.
+
+ :param str data: The data retrieved from the cache.
+ :param object context: The context that was returned from
+ _get_cache_key.
+
+ :returns: The python object that was saved.
+ """
+ # memory cache will handle deserialization for us
+ return data
+
+ def _serialize(self, data, context):
+ """Serialize data so that it can be saved to the cache.
+
+ Take python objects and serialize them so that they can be saved into
+ the cache.
+
+ :param object data: The data to be cached.
+ :param object context: The context that was returned from
+ _get_cache_key.
+
+ :returns: The python object that was saved.
+ """
+ # memory cache will handle serialization for us
+ return data
+
+ def _cache_get(self, token_id):
+ """Return token information from cache.
+
+ If token is invalid raise exc.InvalidToken
+ return token only if fresh (not expired).
+ """
+
+ if not token_id:
+ # Nothing to do
+ return
+
+ key, context = self._get_cache_key(token_id)
+
+ with self._cache_pool.reserve() as cache:
+ serialized = cache.get(key)
+
+ if serialized is None:
+ return None
+
+ data = self._deserialize(serialized, context)
+
+ # Note that _INVALID_INDICATOR and (data, expires) are the only
+ # valid types of serialized cache entries, so there is not
+ # a collision with jsonutils.loads(serialized) == None.
+ if not isinstance(data, six.string_types):
+ data = data.decode('utf-8')
+ cached = jsonutils.loads(data)
+ if cached == self._INVALID_INDICATOR:
+ self._LOG.debug('Cached Token is marked unauthorized')
+ raise exc.InvalidToken(_('Token authorization failed'))
+
+ data, expires = cached
+
+ try:
+ expires = timeutils.parse_isotime(expires)
+ except ValueError:
+ # Gracefully handle upgrade of expiration times from *nix
+ # timestamps to ISO 8601 formatted dates by ignoring old cached
+ # values.
+ return
+
+ expires = timeutils.normalize_time(expires)
+ utcnow = timeutils.utcnow()
+ if utcnow < expires:
+ self._LOG.debug('Returning cached token')
+ return data
+ else:
+ self._LOG.debug('Cached Token seems expired')
+ raise exc.InvalidToken(_('Token authorization failed'))
+
+ def _cache_store(self, token_id, data):
+ """Store value into memcache.
+
+ data may be _INVALID_INDICATOR or a tuple like (data, expires)
+
+ """
+ data = jsonutils.dumps(data)
+ if isinstance(data, six.text_type):
+ data = data.encode('utf-8')
+
+ cache_key, context = self._get_cache_key(token_id)
+ data_to_store = self._serialize(data, context)
+
+ with self._cache_pool.reserve() as cache:
+ cache.set(cache_key, data_to_store, time=self._cache_time)
+
+
+class SecureTokenCache(TokenCache):
+ """A token cache that stores tokens encrypted.
+
+ A more secure version of TokenCache that will encrypt tokens before
+ caching them.
+ """
+
+ def __init__(self, log, security_strategy, secret_key, **kwargs):
+ super(SecureTokenCache, self).__init__(log, **kwargs)
+
+ security_strategy = security_strategy.upper()
+
+ if security_strategy not in ('MAC', 'ENCRYPT'):
+ msg = _('memcache_security_strategy must be ENCRYPT or MAC')
+ raise exc.ConfigurationError(msg)
+ if not secret_key:
+ msg = _('memcache_secret_key must be defined when a '
+ 'memcache_security_strategy is defined')
+ raise exc.ConfigurationError(msg)
+
+ if isinstance(security_strategy, six.string_types):
+ security_strategy = security_strategy.encode('utf-8')
+ if isinstance(secret_key, six.string_types):
+ secret_key = secret_key.encode('utf-8')
+
+ self._security_strategy = security_strategy
+ self._secret_key = secret_key
+
+ def _get_cache_key(self, token_id):
+ context = memcache_crypt.derive_keys(token_id,
+ self._secret_key,
+ self._security_strategy)
+ key = self._CACHE_KEY_TEMPLATE % memcache_crypt.get_cache_key(context)
+ return key, context
+
+ def _deserialize(self, data, context):
+ try:
+ # unprotect_data will return None if raw_cached is None
+ return memcache_crypt.unprotect_data(context, data)
+ except Exception:
+ msg = _LE('Failed to decrypt/verify cache data')
+ self._LOG.exception(msg)
+
+ # this should have the same effect as data not
+ # found in cache
+ return None
+
+ def _serialize(self, data, context):
+ return memcache_crypt.protect_data(context, data)
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_exceptions.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_exceptions.py
new file mode 100644
index 00000000..be045c96
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_exceptions.py
@@ -0,0 +1,27 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+class InvalidToken(Exception):
+ pass
+
+
+class ServiceError(Exception):
+ pass
+
+
+class ConfigurationError(Exception):
+ pass
+
+
+class RevocationListError(Exception):
+ pass
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_identity.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_identity.py
new file mode 100644
index 00000000..8acf70d1
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_identity.py
@@ -0,0 +1,243 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystoneclient import auth
+from keystoneclient import discover
+from keystoneclient import exceptions
+from oslo_serialization import jsonutils
+from six.moves import urllib
+
+from keystonemiddleware.auth_token import _auth
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.auth_token import _utils
+from keystonemiddleware.i18n import _, _LE, _LI, _LW
+
+
+class _RequestStrategy(object):
+
+ AUTH_VERSION = None
+
+ def __init__(self, json_request, adap, include_service_catalog=None):
+ self._json_request = json_request
+ self._adapter = adap
+ self._include_service_catalog = include_service_catalog
+
+ def verify_token(self, user_token):
+ pass
+
+ def fetch_cert_file(self, cert_type):
+ pass
+
+
+class _V2RequestStrategy(_RequestStrategy):
+
+ AUTH_VERSION = (2, 0)
+
+ def verify_token(self, user_token):
+ return self._json_request('GET',
+ '/tokens/%s' % user_token,
+ authenticated=True)
+
+ def fetch_cert_file(self, cert_type):
+ return self._adapter.get('/certificates/%s' % cert_type,
+ authenticated=False)
+
+
+class _V3RequestStrategy(_RequestStrategy):
+
+ AUTH_VERSION = (3, 0)
+
+ def verify_token(self, user_token):
+ path = '/auth/tokens'
+ if not self._include_service_catalog:
+ path += '?nocatalog'
+
+ return self._json_request('GET',
+ path,
+ authenticated=True,
+ headers={'X-Subject-Token': user_token})
+
+ def fetch_cert_file(self, cert_type):
+ if cert_type == 'signing':
+ cert_type = 'certificates'
+
+ return self._adapter.get('/OS-SIMPLE-CERT/%s' % cert_type,
+ authenticated=False)
+
+
+_REQUEST_STRATEGIES = [_V3RequestStrategy, _V2RequestStrategy]
+
+
+class IdentityServer(object):
+ """Base class for operations on the Identity API server.
+
+ The auth_token middleware needs to communicate with the Identity API server
+ to validate UUID tokens, fetch the revocation list, signing certificates,
+ etc. This class encapsulates the data and methods to perform these
+ operations.
+
+ """
+
+ def __init__(self, log, adap, include_service_catalog=None,
+ requested_auth_version=None):
+ self._LOG = log
+ self._adapter = adap
+ self._include_service_catalog = include_service_catalog
+ self._requested_auth_version = requested_auth_version
+
+ # Built on-demand with self._request_strategy.
+ self._request_strategy_obj = None
+
+ @property
+ def auth_uri(self):
+ auth_uri = self._adapter.get_endpoint(interface=auth.AUTH_INTERFACE)
+
+ # NOTE(jamielennox): This weird stripping of the prefix hack is
+ # only relevant to the legacy case. We urljoin '/' to get just the
+ # base URI as this is the original behaviour.
+ if isinstance(self._adapter.auth, _auth.AuthTokenPlugin):
+ auth_uri = urllib.parse.urljoin(auth_uri, '/').rstrip('/')
+
+ return auth_uri
+
+ @property
+ def auth_version(self):
+ return self._request_strategy.AUTH_VERSION
+
+ @property
+ def _request_strategy(self):
+ if not self._request_strategy_obj:
+ strategy_class = self._get_strategy_class()
+ self._adapter.version = strategy_class.AUTH_VERSION
+
+ self._request_strategy_obj = strategy_class(
+ self._json_request,
+ self._adapter,
+ include_service_catalog=self._include_service_catalog)
+
+ return self._request_strategy_obj
+
+ def _get_strategy_class(self):
+ if self._requested_auth_version:
+ # A specific version was requested.
+ if discover.version_match(_V3RequestStrategy.AUTH_VERSION,
+ self._requested_auth_version):
+ return _V3RequestStrategy
+
+ # The version isn't v3 so we don't know what to do. Just assume V2.
+ return _V2RequestStrategy
+
+ # Specific version was not requested then we fall through to
+ # discovering available versions from the server
+ for klass in _REQUEST_STRATEGIES:
+ if self._adapter.get_endpoint(version=klass.AUTH_VERSION):
+ msg = _LI('Auth Token confirmed use of %s apis')
+ self._LOG.info(msg, self._requested_auth_version)
+ return klass
+
+ versions = ['v%d.%d' % s.AUTH_VERSION for s in _REQUEST_STRATEGIES]
+ self._LOG.error(_LE('No attempted versions [%s] supported by server'),
+ ', '.join(versions))
+
+ msg = _('No compatible apis supported by server')
+ raise exc.ServiceError(msg)
+
+ def verify_token(self, user_token, retry=True):
+ """Authenticate user token with identity server.
+
+ :param user_token: user's token id
+ :param retry: flag that forces the middleware to retry
+ user authentication when an indeterminate
+ response is received. Optional.
+ :returns: token object received from identity server on success
+ :raises exc.InvalidToken: if token is rejected
+ :raises exc.ServiceError: if unable to authenticate token
+
+ """
+ user_token = _utils.safe_quote(user_token)
+
+ try:
+ response, data = self._request_strategy.verify_token(user_token)
+ except exceptions.NotFound as e:
+ self._LOG.warn(_LW('Authorization failed for token'))
+ self._LOG.warn(_LW('Identity response: %s'), e.response.text)
+ except exceptions.Unauthorized as e:
+ self._LOG.info(_LI('Identity server rejected authorization'))
+ self._LOG.warn(_LW('Identity response: %s'), e.response.text)
+ if retry:
+ self._LOG.info(_LI('Retrying validation'))
+ return self.verify_token(user_token, False)
+ except exceptions.HttpError as e:
+ self._LOG.error(
+ _LE('Bad response code while validating token: %s'),
+ e.http_status)
+ self._LOG.warn(_LW('Identity response: %s'), e.response.text)
+ else:
+ if response.status_code == 200:
+ return data
+
+ raise exc.InvalidToken()
+
+ def fetch_revocation_list(self):
+ try:
+ response, data = self._json_request(
+ 'GET', '/tokens/revoked',
+ authenticated=True,
+ endpoint_filter={'version': (2, 0)})
+ except exceptions.HTTPError as e:
+ msg = _('Failed to fetch token revocation list: %d')
+ raise exc.RevocationListError(msg % e.http_status)
+ if response.status_code != 200:
+ msg = _('Unable to fetch token revocation list.')
+ raise exc.RevocationListError(msg)
+ if 'signed' not in data:
+ msg = _('Revocation list improperly formatted.')
+ raise exc.RevocationListError(msg)
+ return data['signed']
+
+ def fetch_signing_cert(self):
+ return self._fetch_cert_file('signing')
+
+ def fetch_ca_cert(self):
+ return self._fetch_cert_file('ca')
+
+ def _json_request(self, method, path, **kwargs):
+ """HTTP request helper used to make json requests.
+
+ :param method: http method
+ :param path: relative request url
+ :param **kwargs: additional parameters used by session or endpoint
+ :returns: http response object, response body parsed as json
+ :raises ServerError: when unable to communicate with identity server.
+
+ """
+ headers = kwargs.setdefault('headers', {})
+ headers['Accept'] = 'application/json'
+
+ response = self._adapter.request(path, method, **kwargs)
+
+ try:
+ data = jsonutils.loads(response.text)
+ except ValueError:
+ self._LOG.debug('Identity server did not return json-encoded body')
+ data = {}
+
+ return response, data
+
+ def _fetch_cert_file(self, cert_type):
+ try:
+ response = self._request_strategy.fetch_cert_file(cert_type)
+ except exceptions.HTTPError as e:
+ raise exceptions.CertificateConfigError(e.details)
+ if response.status_code != 200:
+ raise exceptions.CertificateConfigError(response.text)
+ return response.text
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_crypt.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_crypt.py
new file mode 100644
index 00000000..2e45571f
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_crypt.py
@@ -0,0 +1,210 @@
+# Copyright 2010-2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Utilities for memcache encryption and integrity check.
+
+Data should be serialized before entering these functions. Encryption
+has a dependency on the pycrypto. If pycrypto is not available,
+CryptoUnavailableError will be raised.
+
+This module will not be called unless signing or encryption is enabled
+in the config. It will always validate signatures, and will decrypt
+data if encryption is enabled. It is not valid to mix protection
+modes.
+
+"""
+
+import base64
+import functools
+import hashlib
+import hmac
+import math
+import os
+import six
+import sys
+
+from keystonemiddleware.i18n import _
+
+# make sure pycrypto is available
+try:
+ from Crypto.Cipher import AES
+except ImportError:
+ AES = None
+
+HASH_FUNCTION = hashlib.sha384
+DIGEST_LENGTH = HASH_FUNCTION().digest_size
+DIGEST_SPLIT = DIGEST_LENGTH // 3
+DIGEST_LENGTH_B64 = 4 * int(math.ceil(DIGEST_LENGTH / 3.0))
+
+
+class InvalidMacError(Exception):
+ """raise when unable to verify MACed data.
+
+    This usually indicates that data had been unexpectedly modified in memcache.
+
+ """
+ pass
+
+
+class DecryptError(Exception):
+ """raise when unable to decrypt encrypted data.
+
+ """
+ pass
+
+
+class CryptoUnavailableError(Exception):
+ """raise when Python Crypto module is not available.
+
+ """
+ pass
+
+
+def assert_crypto_availability(f):
+ """Ensure Crypto module is available."""
+
+ @functools.wraps(f)
+ def wrapper(*args, **kwds):
+ if AES is None:
+ raise CryptoUnavailableError()
+ return f(*args, **kwds)
+ return wrapper
+
+
+if sys.version_info >= (3, 3):
+ constant_time_compare = hmac.compare_digest
+else:
+ def constant_time_compare(first, second):
+ """Returns True if both string inputs are equal, otherwise False.
+
+ This function should take a constant amount of time regardless of
+ how many characters in the strings match.
+
+ """
+ if len(first) != len(second):
+ return False
+ result = 0
+ if six.PY3 and isinstance(first, bytes) and isinstance(second, bytes):
+ for x, y in zip(first, second):
+ result |= x ^ y
+ else:
+ for x, y in zip(first, second):
+ result |= ord(x) ^ ord(y)
+ return result == 0
+
+
+def derive_keys(token, secret, strategy):
+ """Derives keys for MAC and ENCRYPTION from the user-provided
+ secret. The resulting keys should be passed to the protect and
+ unprotect functions.
+
+ As suggested by NIST Special Publication 800-108, this uses the
+ first 128 bits from the sha384 KDF for the obscured cache key
+ value, the second 128 bits for the message authentication key and
+ the remaining 128 bits for the encryption key.
+
+ This approach is faster than computing a separate hmac as the KDF
+ for each desired key.
+ """
+ digest = hmac.new(secret, token + strategy, HASH_FUNCTION).digest()
+ return {'CACHE_KEY': digest[:DIGEST_SPLIT],
+ 'MAC': digest[DIGEST_SPLIT: 2 * DIGEST_SPLIT],
+ 'ENCRYPTION': digest[2 * DIGEST_SPLIT:],
+ 'strategy': strategy}
+
+
+def sign_data(key, data):
+ """Sign the data using the defined function and the derived key."""
+ mac = hmac.new(key, data, HASH_FUNCTION).digest()
+ return base64.b64encode(mac)
+
+
+@assert_crypto_availability
+def encrypt_data(key, data):
+ """Encrypt the data with the given secret key.
+
+ Padding is n bytes of the value n, where 1 <= n <= blocksize.
+ """
+ iv = os.urandom(16)
+ cipher = AES.new(key, AES.MODE_CBC, iv)
+ padding = 16 - len(data) % 16
+ return iv + cipher.encrypt(data + six.int2byte(padding) * padding)
+
+
+@assert_crypto_availability
+def decrypt_data(key, data):
+ """Decrypt the data with the given secret key."""
+ iv = data[:16]
+ cipher = AES.new(key, AES.MODE_CBC, iv)
+ try:
+ result = cipher.decrypt(data[16:])
+ except Exception:
+ raise DecryptError(_('Encrypted data appears to be corrupted.'))
+
+ # Strip the last n padding bytes where n is the last value in
+ # the plaintext
+ return result[:-1 * six.byte2int([result[-1]])]
+
+
+def protect_data(keys, data):
+ """Given keys and serialized data, returns an appropriately
+ protected string suitable for storage in the cache.
+
+ """
+ if keys['strategy'] == b'ENCRYPT':
+ data = encrypt_data(keys['ENCRYPTION'], data)
+
+ encoded_data = base64.b64encode(data)
+
+ signature = sign_data(keys['MAC'], encoded_data)
+ return signature + encoded_data
+
+
+def unprotect_data(keys, signed_data):
+ """Given keys and cached string data, verifies the signature,
+ decrypts if necessary, and returns the original serialized data.
+
+ """
+ # cache backends return None when no data is found. We don't mind
+ # that this particular special value is unsigned.
+ if signed_data is None:
+ return None
+
+ # First we calculate the signature
+ provided_mac = signed_data[:DIGEST_LENGTH_B64]
+ calculated_mac = sign_data(
+ keys['MAC'],
+ signed_data[DIGEST_LENGTH_B64:])
+
+ # Then verify that it matches the provided value
+ if not constant_time_compare(provided_mac, calculated_mac):
+ raise InvalidMacError(_('Invalid MAC; data appears to be corrupted.'))
+
+ data = base64.b64decode(signed_data[DIGEST_LENGTH_B64:])
+
+ # then if necessary decrypt the data
+ if keys['strategy'] == b'ENCRYPT':
+ data = decrypt_data(keys['ENCRYPTION'], data)
+
+ return data
+
+
+def get_cache_key(keys):
+ """Given keys generated by derive_keys(), returns a base64
+ encoded value suitable for use as a cache key in memcached.
+
+ """
+ return base64.b64encode(keys['CACHE_KEY'])
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_pool.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_pool.py
new file mode 100644
index 00000000..77652868
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_memcache_pool.py
@@ -0,0 +1,184 @@
+# Copyright 2014 Mirantis Inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Thread-safe connection pool for python-memcached."""
+
+# NOTE(yorik-sar): this file is copied between keystone and keystonemiddleware
+# and should be kept in sync until we can use external library for this.
+
+import collections
+import contextlib
+import itertools
+import logging
+import time
+
+from six.moves import queue
+
+from keystonemiddleware.i18n import _LC
+
+
+_PoolItem = collections.namedtuple('_PoolItem', ['ttl', 'connection'])
+
+
+class ConnectionGetTimeoutException(Exception):
+ pass
+
+
+class ConnectionPool(queue.Queue):
+ """Base connection pool class
+
+ This class implements the basic connection pool logic as an abstract base
+ class.
+ """
+ def __init__(self, maxsize, unused_timeout, conn_get_timeout=None):
+ """Initialize the connection pool.
+
+ :param maxsize: maximum number of client connections for the pool
+ :type maxsize: int
+ :param unused_timeout: idle time to live for unused clients (in
+ seconds). If a client connection object has been
+ in the pool and idle for longer than the
+ unused_timeout, it will be reaped. This is to
+ ensure resources are released as utilization
+ goes down.
+ :type unused_timeout: int
+ :param conn_get_timeout: maximum time in seconds to wait for a
+ connection. If set to `None` timeout is
+ indefinite.
+ :type conn_get_timeout: int
+ """
+ queue.Queue.__init__(self, maxsize)
+ self._unused_timeout = unused_timeout
+ self._connection_get_timeout = conn_get_timeout
+ self._acquired = 0
+ self._LOG = logging.getLogger(__name__)
+
+ def _create_connection(self):
+ raise NotImplementedError
+
+ def _destroy_connection(self, conn):
+ raise NotImplementedError
+
+ @contextlib.contextmanager
+ def acquire(self):
+ try:
+ conn = self.get(timeout=self._connection_get_timeout)
+ except queue.Empty:
+ self._LOG.critical(_LC('Unable to get a connection from pool id '
+ '%(id)s after %(seconds)s seconds.'),
+ {'id': id(self),
+ 'seconds': self._connection_get_timeout})
+ raise ConnectionGetTimeoutException()
+ try:
+ yield conn
+ finally:
+ self.put(conn)
+
+ def _qsize(self):
+ return self.maxsize - self._acquired
+
+ if not hasattr(queue.Queue, '_qsize'):
+ qsize = _qsize
+
+ def _get(self):
+ if self.queue:
+ conn = self.queue.pop().connection
+ else:
+ conn = self._create_connection()
+ self._acquired += 1
+ return conn
+
+ def _put(self, conn):
+ self.queue.append(_PoolItem(
+ ttl=time.time() + self._unused_timeout,
+ connection=conn,
+ ))
+ self._acquired -= 1
+ # Drop all expired connections from the right end of the queue
+ now = time.time()
+ while self.queue and self.queue[0].ttl < now:
+ conn = self.queue.popleft().connection
+ self._destroy_connection(conn)
+
+
+class MemcacheClientPool(ConnectionPool):
+ def __init__(self, urls, arguments, **kwargs):
+ ConnectionPool.__init__(self, **kwargs)
+ self._urls = urls
+ self._arguments = arguments
+ # NOTE(morganfainberg): The host objects expect an int for the
+ # deaduntil value. Initialize this at 0 for each host with 0 indicating
+ # the host is not dead.
+ self._hosts_deaduntil = [0] * len(urls)
+
+ # NOTE(morganfainberg): Lazy import to allow middleware to work with
+ # python 3k even if memcache will not due to python 3k
+ # incompatibilities within the python-memcache library.
+ global memcache
+ import memcache
+
+ # This 'class' is taken from http://stackoverflow.com/a/22520633/238308
+ # Don't inherit client from threading.local so that we can reuse
+ # clients in different threads
+ MemcacheClient = type('_MemcacheClient', (object,),
+ dict(memcache.Client.__dict__))
+
+ self._memcache_client_class = MemcacheClient
+
+ def _create_connection(self):
+ return self._memcache_client_class(self._urls, **self._arguments)
+
+ def _destroy_connection(self, conn):
+ conn.disconnect_all()
+
+ def _get(self):
+ conn = ConnectionPool._get(self)
+ try:
+ # Propagate host state known to us to this client's list
+ now = time.time()
+ for deaduntil, host in zip(self._hosts_deaduntil, conn.servers):
+ if deaduntil > now and host.deaduntil <= now:
+ host.mark_dead('propagating death mark from the pool')
+ host.deaduntil = deaduntil
+ except Exception:
+ # We need to be sure that connection doesn't leak from the pool.
+ # This code runs before we enter context manager's try-finally
+ # block, so we need to explicitly release it here
+ ConnectionPool._put(self, conn)
+ raise
+ return conn
+
+ def _put(self, conn):
+ try:
+ # If this client found that one of the hosts is dead, mark it as
+ # such in our internal list
+ now = time.time()
+ for i, deaduntil, host in zip(itertools.count(),
+ self._hosts_deaduntil,
+ conn.servers):
+ # Do nothing if we already know this host is dead
+ if deaduntil <= now:
+ if host.deaduntil > now:
+ self._hosts_deaduntil[i] = host.deaduntil
+ else:
+ self._hosts_deaduntil[i] = 0
+ # If all hosts are dead we should forget that they're dead. This
+ # way we won't get completely shut off until dead_retry seconds
+ # pass, but will be checking servers as frequent as we can (over
+ # way smaller socket_timeout)
+ if all(deaduntil > now for deaduntil in self._hosts_deaduntil):
+ self._hosts_deaduntil[:] = [0] * len(self._hosts_deaduntil)
+ finally:
+ ConnectionPool._put(self, conn)
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_revocations.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_revocations.py
new file mode 100644
index 00000000..8cc449ad
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_revocations.py
@@ -0,0 +1,106 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import logging
+import os
+
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.i18n import _
+
+_LOG = logging.getLogger(__name__)
+
+
+class Revocations(object):
+    """Cached access to the PKI token revocation list.
+
+    The list is fetched from the identity server, CMS-verified, kept in
+    memory and persisted to the signing directory; the on-disk file's
+    mtime doubles as the fetch timestamp across process restarts.
+    """
+
+    # Name of the persisted revocation list inside the signing directory.
+    _FILE_NAME = 'revoked.pem'
+
+    def __init__(self, timeout, signing_directory, identity_server,
+                 cms_verify, log=_LOG):
+        # How long a fetched list stays valid; added to a datetime below,
+        # so presumably a datetime.timedelta -- TODO confirm at call site.
+        self._cache_timeout = timeout
+        self._signing_directory = signing_directory
+        self._identity_server = identity_server
+        # Callable that CMS-verifies raw fetched data and returns the
+        # decoded payload.
+        self._cms_verify = cms_verify
+        self._log = log
+
+        # Lazily populated backing fields for the properties below.
+        self._fetched_time_prop = None
+        self._list_prop = None
+
+    @property
+    def _fetched_time(self):
+        """When the list was last fetched (datetime.min if never)."""
+        if not self._fetched_time_prop:
+            # If the fetched list has been written to disk, use its
+            # modification time.
+            file_path = self._signing_directory.calc_path(self._FILE_NAME)
+            if os.path.exists(file_path):
+                mtime = os.path.getmtime(file_path)
+                fetched_time = datetime.datetime.utcfromtimestamp(mtime)
+            # Otherwise the list will need to be fetched.
+            else:
+                fetched_time = datetime.datetime.min
+            self._fetched_time_prop = fetched_time
+        return self._fetched_time_prop
+
+    @_fetched_time.setter
+    def _fetched_time(self, value):
+        self._fetched_time_prop = value
+
+    def _fetch(self):
+        """Fetch the raw list from the server, returning it CMS-verified."""
+        revocation_list_data = self._identity_server.fetch_revocation_list()
+        return self._cms_verify(revocation_list_data)
+
+    @property
+    def _list(self):
+        """The current revocation list, refetched when the cache is stale."""
+        timeout = self._fetched_time + self._cache_timeout
+        list_is_current = timeutils.utcnow() < timeout
+
+        if list_is_current:
+            # Load the list from disk if required
+            if not self._list_prop:
+                self._list_prop = jsonutils.loads(
+                    self._signing_directory.read_file(self._FILE_NAME))
+        else:
+            # Stale or never fetched: refresh via the setter, which also
+            # persists the fresh list to disk.
+            self._list = self._fetch()
+        return self._list_prop
+
+    @_list.setter
+    def _list(self, value):
+        """Save a revocation list to memory and to disk.
+
+        :param value: A json-encoded revocation list
+
+        """
+        self._list_prop = jsonutils.loads(value)
+        self._fetched_time = timeutils.utcnow()
+        self._signing_directory.write_file(self._FILE_NAME, value)
+
+    def _is_revoked(self, token_id):
+        """Indicate whether the token_id appears in the revocation list."""
+        revoked_tokens = self._list.get('revoked', None)
+        if not revoked_tokens:
+            return False
+
+        revoked_ids = (x['id'] for x in revoked_tokens)
+        return token_id in revoked_ids
+
+    def _any_revoked(self, token_ids):
+        """Return True if any id in *token_ids* is revoked."""
+        for token_id in token_ids:
+            if self._is_revoked(token_id):
+                return True
+        return False
+
+    def check(self, token_ids):
+        """Raise InvalidToken if any id in *token_ids* has been revoked."""
+        if self._any_revoked(token_ids):
+            self._log.debug('Token is marked as having been revoked')
+            raise exc.InvalidToken(_('Token has been revoked'))
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_signing_dir.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_signing_dir.py
new file mode 100644
index 00000000..f8b1a410
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_signing_dir.py
@@ -0,0 +1,83 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import os
+import stat
+import tempfile
+
+import six
+
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.i18n import _, _LI, _LW
+
+_LOG = logging.getLogger(__name__)
+
+
+class SigningDirectory(object):
+    """Manages the directory where signing material is cached on disk.
+
+    When no directory name is given, a fresh private temporary directory
+    is created; otherwise the supplied path is validated (writable,
+    owned by us, mode 0700) before use.
+    """
+
+    def __init__(self, directory_name=None, log=None):
+        self._log = log or _LOG
+
+        if directory_name is None:
+            directory_name = tempfile.mkdtemp(prefix='keystone-signing-')
+            self._log.info(
+                _LI('Using %s as cache directory for signing certificate'),
+                directory_name)
+        self._directory_name = directory_name
+
+        self._verify_signing_dir()
+
+    def write_file(self, file_name, new_contents):
+        """Atomically write *new_contents* into *file_name* in the dir."""
+
+        # In Python2, encoding is slow so the following check avoids it if it
+        # is not absolutely necessary.
+        if isinstance(new_contents, six.text_type):
+            new_contents = new_contents.encode('utf-8')
+
+        def _atomic_write():
+            # Write to a temp file in the same directory, then rename over
+            # the target so readers never observe a partially written file.
+            with tempfile.NamedTemporaryFile(dir=self._directory_name,
+                                             delete=False) as f:
+                f.write(new_contents)
+            os.rename(f.name, self.calc_path(file_name))
+
+        try:
+            _atomic_write()
+        except (OSError, IOError):
+            # The directory may have vanished or lost permissions;
+            # re-verify (which recreates it if missing) and retry once.
+            self._verify_signing_dir()
+            _atomic_write()
+
+    def read_file(self, file_name):
+        """Return the text contents of *file_name* in the directory."""
+        path = self.calc_path(file_name)
+        open_kwargs = {'encoding': 'utf-8'} if six.PY3 else {}
+        with open(path, 'r', **open_kwargs) as f:
+            return f.read()
+
+    def calc_path(self, file_name):
+        """Return the full path of *file_name* inside the directory."""
+        return os.path.join(self._directory_name, file_name)
+
+    def _verify_signing_dir(self):
+        """Create the directory if missing; warn on unsafe ownership/mode."""
+        if os.path.isdir(self._directory_name):
+            if not os.access(self._directory_name, os.W_OK):
+                raise exc.ConfigurationError(
+                    _('unable to access signing_dir %s') %
+                    self._directory_name)
+            uid = os.getuid()
+            if os.stat(self._directory_name).st_uid != uid:
+                self._log.warning(_LW('signing_dir is not owned by %s'), uid)
+            current_mode = stat.S_IMODE(os.stat(self._directory_name).st_mode)
+            if current_mode != stat.S_IRWXU:
+                self._log.warning(
+                    _LW('signing_dir mode is %(mode)s instead of %(need)s'),
+                    {'mode': oct(current_mode), 'need': oct(stat.S_IRWXU)})
+        else:
+            os.makedirs(self._directory_name, stat.S_IRWXU)
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_user_plugin.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_user_plugin.py
new file mode 100644
index 00000000..12a8767c
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_user_plugin.py
@@ -0,0 +1,169 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystoneclient.auth.identity import base as base_identity
+
+
+class _TokenData(object):
+    """An abstraction to show auth_token consumers some of the token contents.
+
+    This is a simplified and cleaned up keystoneclient.access.AccessInfo object
+    with which services relying on auth_token middleware can find details of
+    the current token.
+    """
+
+    def __init__(self, auth_ref):
+        # The underlying keystoneclient AccessInfo all properties read from.
+        self._stored_auth_ref = auth_ref
+
+    @property
+    def _is_v2(self):
+        # True when the wrapped AccessInfo is an Identity v2.0 token.
+        return self._stored_auth_ref.version == 'v2.0'
+
+    @property
+    def auth_token(self):
+        """The token data used to authenticate requests.
+
+        :returns: token data.
+        :rtype: str
+        """
+        return self._stored_auth_ref.auth_token
+
+    @property
+    def user_id(self):
+        """The user id associated with the authentication request.
+
+        :rtype: str
+        """
+        return self._stored_auth_ref.user_id
+
+    @property
+    def user_domain_id(self):
+        """The domain id of the user associated with the authentication
+        request.
+
+        :rtype: str
+        """
+        # NOTE(jamielennox): v2 AccessInfo returns 'default' for domain_id
+        # because it can't know that value. We want to return None instead.
+        if self._is_v2:
+            return None
+
+        return self._stored_auth_ref.user_domain_id
+
+    @property
+    def project_id(self):
+        """The project ID associated with the authentication.
+
+        :rtype: str
+        """
+        return self._stored_auth_ref.project_id
+
+    @property
+    def project_domain_id(self):
+        """The domain id of the project associated with the authentication
+        request.
+
+        :rtype: str
+        """
+        # NOTE(jamielennox): v2 AccessInfo returns 'default' for domain_id
+        # because it can't know that value. We want to return None instead.
+        if self._is_v2:
+            return None
+
+        return self._stored_auth_ref.project_domain_id
+
+    @property
+    def trust_id(self):
+        """The trust id associated with the authentication request.
+
+        :rtype: str
+        """
+        return self._stored_auth_ref.trust_id
+
+    @property
+    def role_ids(self):
+        """Role ids of the user associated with the authentication request.
+
+        :rtype: set(str)
+        """
+        # Normalize a None role list to an empty, immutable set.
+        return frozenset(self._stored_auth_ref.role_ids or [])
+
+    @property
+    def role_names(self):
+        """Role names of the user associated with the authentication request.
+
+        :rtype: set(str)
+        """
+        return frozenset(self._stored_auth_ref.role_names or [])
+
+
+class UserAuthPlugin(base_identity.BaseIdentityPlugin):
+    """The incoming authentication credentials.
+
+    A plugin that represents the incoming user credentials. This can be
+    consumed by applications.
+
+    This object is not expected to be constructed directly by users. It is
+    created and passed by auth_token middleware and then can be used as the
+    authentication plugin when communicating via a session.
+    """
+
+    def __init__(self, user_auth_ref, serv_auth_ref):
+        # reauthenticate=False: the middleware already validated the token,
+        # so the session must never attempt to obtain a fresh one.
+        super(UserAuthPlugin, self).__init__(reauthenticate=False)
+        self._user_auth_ref = user_auth_ref
+        self._serv_auth_ref = serv_auth_ref
+        # Lazily-built _TokenData wrappers around the refs above.
+        self._user_data = None
+        self._serv_data = None
+
+    @property
+    def has_user_token(self):
+        """Did this authentication request contain a user auth token."""
+        return self._user_auth_ref is not None
+
+    @property
+    def user(self):
+        """Authentication information about the user token.
+
+        Will return None if a user token was not passed with this request.
+        """
+        if not self.has_user_token:
+            return None
+
+        if not self._user_data:
+            self._user_data = _TokenData(self._user_auth_ref)
+
+        return self._user_data
+
+    @property
+    def has_service_token(self):
+        """Did this authentication request contain a service token."""
+        return self._serv_auth_ref is not None
+
+    @property
+    def service(self):
+        """Authentication information about the service token.
+
+        Will return None if a service token was not passed with this
+        request.
+        """
+        if not self.has_service_token:
+            return None
+
+        if not self._serv_data:
+            self._serv_data = _TokenData(self._serv_auth_ref)
+
+        return self._serv_data
+
+    def get_auth_ref(self, session, **kwargs):
+        # NOTE(jamielennox): We will always use the auth_ref that was
+        # calculated by the middleware. reauthenticate=False in __init__ should
+        # ensure that this function is only called on the first access.
+        return self._user_auth_ref
diff --git a/keystonemiddleware-moon/keystonemiddleware/auth_token/_utils.py b/keystonemiddleware-moon/keystonemiddleware/auth_token/_utils.py
new file mode 100644
index 00000000..daed02dd
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/auth_token/_utils.py
@@ -0,0 +1,32 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from six.moves import urllib
+
+
+def safe_quote(s):
+ """URL-encode strings that are not already URL-encoded."""
+ return urllib.parse.quote(s) if s == urllib.parse.unquote(s) else s
+
+
+class MiniResp(object):
+
+ def __init__(self, error_message, env, headers=[]):
+ # The HEAD method is unique: it must never return a body, even if
+ # it reports an error (RFC-2616 clause 9.4). We relieve callers
+ # from varying the error responses depending on the method.
+ if env['REQUEST_METHOD'] == 'HEAD':
+ self.body = ['']
+ else:
+ self.body = [error_message.encode()]
+ self.headers = list(headers)
+ self.headers.append(('Content-type', 'text/plain'))
diff --git a/keystonemiddleware-moon/keystonemiddleware/authz.py b/keystonemiddleware-moon/keystonemiddleware/authz.py
new file mode 100644
index 00000000..f969b2cc
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/authz.py
@@ -0,0 +1,326 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import webob
+import logging
+import json
+import six
+import requests
+import re
+import httplib
+
+from keystone import exception
+from cStringIO import StringIO
+from oslo.config import cfg
+# from keystoneclient import auth
+from keystonemiddleware.i18n import _, _LC, _LE, _LI, _LW
+
+
+# Options for the [keystone_authz] config section.
+# NOTE(review): authz_password ships a plaintext default credential;
+# deployments must override it.
+_OPTS = [
+    cfg.StrOpt('auth_uri',
+               default="http://127.0.0.1:35357/v3",
+               help='Complete public Identity API endpoint.'),
+    cfg.StrOpt('auth_version',
+               default=None,
+               help='API version of the admin Identity API endpoint.'),
+    cfg.StrOpt('authz_login',
+               default="admin",
+               help='Name of the administrator who will connect to the Keystone Moon backends.'),
+    cfg.StrOpt('authz_password',
+               default="nomoresecrete",
+               help='Password of the administrator who will connect to the Keystone Moon backends.'),
+    cfg.StrOpt('logfile',
+               default="/tmp/authz.log",
+               help='File where logs goes.'),
+    ]
+
+_AUTHZ_GROUP = 'keystone_authz'
+CONF = cfg.CONF
+CONF.register_opts(_OPTS, group=_AUTHZ_GROUP)
+# auth.register_conf_options(CONF, _AUTHZ_GROUP)
+
+# from http://developer.openstack.org/api-ref-objectstorage-v1.html
+# (URL regex, HTTP method, symbolic action name) triplets used by
+# AuthZProtocol._get_action to map Swift requests to action names.
+SWIFT_API = (
+    ("^/v1/(?P<account>[\w-]+)$", "GET", "get_account_details"),
+    ("^/v1/(?P<account>[\w-]+)$", "POST", "modify_account"),
+    ("^/v1/(?P<account>[\w-]+)$", "HEAD", "get_account"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)$", "GET", "get_container"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)$", "PUT", "create_container"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)$", "POST", "update_container_metadata"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)$", "DELETE", "delete_container"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)$", "HEAD", "get_container_metadata"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)/(?P<object>[\w-]+)$", "GET", "get_object"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)/(?P<object>[\w-]+)$", "PUT", "create_object"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)/(?P<object>[\w-]+)$", "COPY", "copy_object"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)/(?P<object>[\w-]+)$", "POST", "update_object_metadata"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)/(?P<object>[\w-]+)$", "DELETE", "delete_object"),
+    ("^/v1/(?P<account>[\w-]+)/(?P<container>[\w-]+)/(?P<object>[\w-]+)$", "HEAD", "get_object_metadata"),
+)
+
+
+class ServiceError(Exception):
+    """Raised when the authz backend call fails or denies the request.
+
+    Instantiated with a prepared webob deny response (see
+    AuthZProtocol._get_authz_from_moon).
+    """
+    pass
+
+
+class AuthZProtocol(object):
+    """Middleware that handles authenticating client calls.
+
+    For each request whose action can be identified, an admin token is
+    obtained from Keystone and the Moon backend is asked whether the
+    (tenant, subject, object, action) tuple is authorized.
+    """
+
+    # Body of a Keystone v3 password authentication request used to get
+    # the admin token for the Moon authz call.
+    # NOTE(review): user/password/project are hard-coded here rather than
+    # taken from the authz_login/authz_password options -- confirm.
+    post = {
+        "auth": {
+            "identity": {
+                "methods": [
+                    "password"
+                ],
+                "password": {
+                    "user": {
+                        "domain": {
+                            "id": "Default"
+                        },
+                        "name": "admin",
+                        "password": "nomoresecrete"
+                    }
+                }
+            },
+            "scope": {
+                "project": {
+                    "domain": {
+                        "id": "Default"
+                    },
+                    "name": "demo"
+                }
+            }
+        }
+    }
+
+    def __init__(self, app, conf):
+        """Wrap *app*; read endpoint and SSL settings from *conf* dict."""
+        self._LOG = logging.getLogger(conf.get('log_name', __name__))
+        # FIXME: events are duplicated in log file
+        authz_fh = logging.FileHandler(CONF.keystone_authz["logfile"])
+        self._LOG.setLevel(logging.DEBUG)
+        self._LOG.addHandler(authz_fh)
+        self._LOG.info(_LI('Starting Keystone authz middleware'))
+        self._conf = conf
+        self._app = app
+
+        # MOON
+        # Identity endpoint used for both token and authz requests.
+        self.auth_host = conf.get('auth_host', "127.0.0.1")
+        self.auth_port = int(conf.get('auth_port', 35357))
+        auth_protocol = conf.get('auth_protocol', 'http')
+        self._request_uri = '%s://%s:%s' % (auth_protocol, self.auth_host,
+                                            self.auth_port)
+
+        # SSL
+        insecure = conf.get('insecure', False)
+        cert_file = conf.get('certfile')
+        key_file = conf.get('keyfile')
+
+        # NOTE(review): self._verify is later passed as requests' ``verify``
+        # argument; a (cert, key) tuple is normally the ``cert`` argument
+        # (client certificate), not ``verify`` -- confirm intent.
+        if insecure:
+            self._verify = False
+        elif cert_file and key_file:
+            self._verify = (cert_file, key_file)
+        elif cert_file:
+            self._verify = cert_file
+        else:
+            self._verify = None
+
+    def __set_token(self):
+        """Authenticate to Keystone; stores token data on self.token.
+
+        The token id itself ends up in self.x_subject_token, captured
+        from the X-Subject-Token response header by get_url().
+        """
+        data = self.get_url("/v3/auth/tokens", post_data=self.post)
+        if "token" not in data:
+            raise Exception("Authentication problem ({})".format(data))
+        self.token = data["token"]
+
+    def __unset_token(self):
+        """Revoke the token previously obtained by __set_token."""
+        data = self.get_url("/v3/auth/tokens", method="DELETE", authtoken=True)
+        if "content" in data and len(data["content"]) > 0:
+            self._LOG.error("Error while unsetting token {}".format(data["content"]))
+        self.token = None
+
+    def get_url(self, url, post_data=None, delete_data=None, method="GET", authtoken=None):
+        """Send an HTTP request to the identity server and decode the reply.
+
+        :param url: path part of the request
+        :param post_data: dict sent as a JSON POST body (forces POST)
+        :param delete_data: dict sent as a JSON DELETE body (forces DELETE)
+        :param method: HTTP verb used when no body is given
+        :param authtoken: if true, send the cached x_subject_token as
+            authentication header(s)
+        :return: the decoded JSON reply, or {"content": raw_data} when
+            the reply is not valid JSON
+
+        Side effect: any X-Subject-Token response header is remembered on
+        self.x_subject_token for later authenticated calls.
+        """
+        if post_data:
+            method = "POST"
+        if delete_data:
+            method = "DELETE"
+        self._LOG.debug("\033[32m{} {}\033[m".format(method, url))
+        conn = httplib.HTTPConnection(self.auth_host, self.auth_port)
+        headers = {
+            "Content-type": "application/x-www-form-urlencoded",
+            "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+        }
+        if authtoken:
+            if self.x_subject_token:
+                if method == "DELETE":
+                    # DELETE /auth/tokens revokes the token named by
+                    # X-Subject-Token, authenticated via X-Auth-Token.
+                    headers["X-Subject-Token"] = self.x_subject_token
+                    headers["X-Auth-Token"] = self.x_subject_token
+                else:
+                    headers["X-Auth-Token"] = self.x_subject_token
+        if post_data:
+            method = "POST"
+            headers["Content-type"] = "application/json"
+            post_data = json.dumps(post_data)
+            conn.request(method, url, post_data, headers=headers)
+        elif delete_data:
+            method = "DELETE"
+            conn.request(method, url, json.dumps(delete_data), headers=headers)
+        else:
+            conn.request(method, url, headers=headers)
+        resp = conn.getresponse()
+        headers = resp.getheaders()
+        try:
+            # Remember the issued token id for later authenticated calls.
+            self.x_subject_token = dict(headers)["x-subject-token"]
+        except KeyError:
+            pass
+        content = resp.read()
+        conn.close()
+        try:
+            return json.loads(content)
+        except ValueError:
+            return {"content": content}
+
+    def _deny_request(self, code):
+        """Build an XML webob error response for symbolic error *code*."""
+        error_table = {
+            'AccessDenied': (401, 'Access denied'),
+            'InvalidURI': (400, 'Could not parse the specified URI'),
+            'NotFound': (404, 'URI not found'),
+            'Error': (500, 'Server error'),
+        }
+        resp = webob.Response(content_type='text/xml')
+        resp.status = error_table[code][0]
+        error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
+                     '<Error>\r\n <Code>%s</Code>\r\n '
+                     '<Message>%s</Message>\r\n</Error>\r\n' %
+                     (code, error_table[code][1]))
+        if six.PY3:
+            error_msg = error_msg.encode()
+        resp.body = error_msg
+        return resp
+
+    def _get_authz_from_moon(self, auth_token, tenant_id, subject_id, object_id, action_id):
+        """Ask the Moon backend whether the request tuple is authorized.
+
+        :raises ServiceError: carrying a prepared deny response when the
+            backend is unreachable or answers with a non-2xx status.
+        :return: the successful requests.Response object
+        """
+        headers = {'X-Auth-Token': auth_token}
+        self._LOG.debug('X-Auth-Token={}'.format(auth_token))
+        try:
+            _url ='{}/v3/OS-MOON/authz/{}/{}/{}/{}'.format(
+                self._request_uri,
+                tenant_id,
+                subject_id,
+                object_id,
+                action_id)
+            self._LOG.info(_url)
+            response = requests.get(_url,
+                                    headers=headers,
+                                    verify=self._verify)
+        except requests.exceptions.RequestException as e:
+            # NOTE(review): _LI (info marker) used in an error-level call;
+            # _LE would be the matching marker.
+            self._LOG.error(_LI('HTTP connection exception: %s'), e)
+            resp = self._deny_request('InvalidURI')
+            raise ServiceError(resp)
+
+        if response.status_code < 200 or response.status_code >= 300:
+            self._LOG.debug('Keystone reply error: status=%s reason=%s',
+                            response.status_code, response.reason)
+            if response.status_code == 404:
+                resp = self._deny_request('NotFound')
+            elif response.status_code == 401:
+                resp = self._deny_request('AccessDenied')
+            else:
+                resp = self._deny_request('Error')
+            raise ServiceError(resp)
+
+        return response
+
+    def _find_openstack_component(self, env):
+        """Guess which OpenStack service produced this WSGI environ."""
+        if "nova.context" in env.keys():
+            return "nova"
+        elif "swift.authorize" in env.keys():
+            return "swift"
+        else:
+            self._LOG.debug(env.keys())
+            return "unknown"
+
+    def _get_action(self, env, component):
+        """ Find and return the action of the request
+        Actually, find only Nova action (start, destroy, pause, unpause, ...)
+
+        :param env: the request
+        :return: the action or ""
+        """
+        action = ""
+        if component == "nova":
+            length = int(env.get('CONTENT_LENGTH', '0'))
+            # TODO (dthom): compute for Nova, Cinder, Neutron, ...
+            action = ""
+            if length > 0:
+                try:
+                    sub_action_object = env['wsgi.input'].read(length)
+                    # NOTE(review): dict.keys()[0] is Python 2 only (the
+                    # httplib/cStringIO imports confirm a py2 target).
+                    action = json.loads(sub_action_object).keys()[0]
+                    # Re-wrap the consumed body so downstream apps can
+                    # still read wsgi.input.
+                    body = StringIO(sub_action_object)
+                    env['wsgi.input'] = body
+                except ValueError:
+                    self._LOG.error("Error in decoding sub-action")
+                except Exception as e:
+                    self._LOG.error(str(e))
+            # NOTE(review): by precedence this parses as (not action) or
+            # (len(action) == 0 and path-check); since an empty action
+            # already short-circuits on `not action`, the "servers/detail"
+            # test can never fire -- confirm the intended grouping.
+            if not action or len(action) == 0 and "servers/detail" in env["PATH_INFO"]:
+                return "list"
+        if component == "swift":
+            path = env["PATH_INFO"]
+            method = env["REQUEST_METHOD"]
+            for api in SWIFT_API:
+                if re.match(api[0], path) and method == api[1]:
+                    action = api[2]
+        return action
+
+    @staticmethod
+    def _get_object(env, component):
+        """Derive an object identifier from the request path."""
+        if component == "nova":
+            # get the object ID which is located before "action" in the URL
+            return env.get("PATH_INFO").split("/")[-2]
+        elif component == "swift":
+            # remove the "/v1/" part of the URL
+            return env.get("PATH_INFO").split("/", 2)[-1].replace("/", "-")
+        return "unknown"
+
+    def __call__(self, env, start_response):
+        """WSGI entry point: query Moon and forward or reject the request."""
+        req = webob.Request(env)
+
+        # token = req.headers.get('X-Auth-Token',
+        #                         req.headers.get('X-Storage-Token'))
+        # if not token:
+        #     self._LOG.error("No token")
+        #     return self._app(env, start_response)
+
+        # Caller identity headers as set by auth_token middleware.
+        subject_id = env.get("HTTP_X_USER_ID")
+        tenant_id = env.get("HTTP_X_TENANT_ID")
+        component = self._find_openstack_component(env)
+        action_id = self._get_action(env, component)
+        if action_id:
+            self._LOG.debug("OpenStack component {}".format(component))
+            object_id = self._get_object(env, component)
+            self._LOG.debug("{}-{}-{}-{}".format(subject_id, object_id, action_id, tenant_id))
+            self.__set_token()
+            resp = self._get_authz_from_moon(self.x_subject_token, tenant_id, subject_id, object_id, action_id)
+            self._LOG.info("Moon answer: {}-{}".format(resp.status_code, resp.content))
+            self.__unset_token()
+            if resp.status_code == 200:
+                try:
+                    answer = json.loads(resp.content)
+                    self._LOG.debug(answer)
+                    if "authz" in answer and answer["authz"]:
+                        return self._app(env, start_response)
+                # NOTE(review): bare except converts any failure above
+                # (including json decoding) into Unauthorized; when the
+                # answer is 200 with "authz" absent/false and no exception
+                # fires, execution falls through and the request is still
+                # forwarded below -- confirm this is intended.
+                except:
+                    raise exception.Unauthorized(message="You are not authorized to do that!")
+        self._LOG.debug("No action_id found for {}".format(env.get("PATH_INFO")))
+        # If action is not found, we can't raise an exception because a lots of action is missing
+        # in function self._get_action, it is not possible to get them all.
+        return self._app(env, start_response)
+        # raise exception.Unauthorized(message="You are not authorized to do that!")
+
+
+def filter_factory(global_conf, **local_conf):
+ """Returns a WSGI filter app for use with paste.deploy."""
+ conf = global_conf.copy()
+ conf.update(local_conf)
+
+ def auth_filter(app):
+ return AuthZProtocol(app, conf)
+ return auth_filter
+
diff --git a/keystonemiddleware-moon/keystonemiddleware/ec2_token.py b/keystonemiddleware-moon/keystonemiddleware/ec2_token.py
new file mode 100644
index 00000000..df3bb6b0
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/ec2_token.py
@@ -0,0 +1,130 @@
+# Copyright 2012 OpenStack Foundation
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Starting point for routing EC2 requests.
+
+"""
+
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+import requests
+import webob.dec
+import webob.exc
+
+# Options for the [keystone_ec2_token] config section: endpoint URL plus
+# TLS client-certificate and server-verification settings.
+keystone_ec2_opts = [
+    cfg.StrOpt('url',
+               default='http://localhost:5000/v2.0/ec2tokens',
+               help='URL to get token from ec2 request.'),
+    cfg.StrOpt('keyfile',
+               help='Required if EC2 server requires client certificate.'),
+    cfg.StrOpt('certfile',
+               help='Client certificate key filename. Required if EC2 server '
+                    'requires client certificate.'),
+    cfg.StrOpt('cafile',
+               help='A PEM encoded certificate authority to use when '
+                    'verifying HTTPS connections. Defaults to the system '
+                    'CAs.'),
+    cfg.BoolOpt('insecure', default=False,
+                help='Disable SSL certificate verification.'),
+]
+
+CONF = cfg.CONF
+CONF.register_opts(keystone_ec2_opts, group='keystone_ec2_token')
+
+
+class EC2Token(object):
+ """Authenticate an EC2 request with keystone and convert to token."""
+
+ def __init__(self, application):
+ super(EC2Token, self).__init__()
+ self._application = application
+
+ @webob.dec.wsgify()
+ def __call__(self, req):
+ # Read request signature and access id.
+ try:
+ signature = req.params['Signature']
+ access = req.params['AWSAccessKeyId']
+ except KeyError:
+ raise webob.exc.HTTPBadRequest()
+
+ # Make a copy of args for authentication and signature verification.
+ auth_params = dict(req.params)
+ # Not part of authentication args
+ auth_params.pop('Signature')
+
+ # Authenticate the request.
+ creds = {
+ 'ec2Credentials': {
+ 'access': access,
+ 'signature': signature,
+ 'host': req.host,
+ 'verb': req.method,
+ 'path': req.path,
+ 'params': auth_params,
+ }
+ }
+ creds_json = jsonutils.dumps(creds)
+ headers = {'Content-Type': 'application/json'}
+
+ verify = True
+ if CONF.keystone_ec2_token.insecure:
+ verify = False
+ elif CONF.keystone_ec2_token.cafile:
+ verify = CONF.keystone_ec2_token.cafile
+
+ cert = None
+ if (CONF.keystone_ec2_token.certfile and
+ CONF.keystone_ec2_token.keyfile):
+ cert = (CONF.keystone_ec2_certfile,
+ CONF.keystone_ec2_token.keyfile)
+ elif CONF.keystone_ec2_token.certfile:
+ cert = CONF.keystone_ec2_token.certfile
+
+ response = requests.post(CONF.keystone_ec2_token.url, data=creds_json,
+ headers=headers, verify=verify, cert=cert)
+
+ # NOTE(vish): We could save a call to keystone by
+ # having keystone return token, tenant,
+ # user, and roles from this call.
+
+ result = response.json()
+ try:
+ token_id = result['access']['token']['id']
+ except (AttributeError, KeyError):
+ raise webob.exc.HTTPBadRequest()
+
+ # Authenticated!
+ req.headers['X-Auth-Token'] = token_id
+ return self._application
+
+
+def filter_factory(global_conf, **local_conf):
+ """Returns a WSGI filter app for use with paste.deploy."""
+ conf = global_conf.copy()
+ conf.update(local_conf)
+
+ def auth_filter(app):
+ return EC2Token(app, conf)
+ return auth_filter
+
+
+def app_factory(global_conf, **local_conf):
+ conf = global_conf.copy()
+ conf.update(local_conf)
+ return EC2Token(None, conf)
diff --git a/keystonemiddleware-moon/keystonemiddleware/i18n.py b/keystonemiddleware-moon/keystonemiddleware/i18n.py
new file mode 100644
index 00000000..09984607
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/i18n.py
@@ -0,0 +1,37 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""oslo.i18n integration module.
+
+See http://docs.openstack.org/developer/oslo.i18n/usage.html .
+
+"""
+
+from oslo import i18n
+
+
+_translators = i18n.TranslatorFactory(domain='keystonemiddleware')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level: I=info, W=warning, E=error, C=critical.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
diff --git a/keystonemiddleware-moon/keystonemiddleware/openstack/__init__.py b/keystonemiddleware-moon/keystonemiddleware/openstack/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/openstack/__init__.py
diff --git a/keystonemiddleware-moon/keystonemiddleware/openstack/common/__init__.py b/keystonemiddleware-moon/keystonemiddleware/openstack/common/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/openstack/common/__init__.py
diff --git a/keystonemiddleware-moon/keystonemiddleware/openstack/common/memorycache.py b/keystonemiddleware-moon/keystonemiddleware/openstack/common/memorycache.py
new file mode 100644
index 00000000..f793c937
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/openstack/common/memorycache.py
@@ -0,0 +1,97 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Super simple fake memcache client."""
+
+import copy
+
+from oslo.config import cfg
+from oslo.utils import timeutils
+
+memcache_opts = [
+    cfg.ListOpt('memcached_servers',
+                help='Memcached servers or None for in process cache.'),
+]
+
+# Register on the global config object so CONF.memcached_servers resolves.
+CONF = cfg.CONF
+CONF.register_opts(memcache_opts)
+
+
+def list_opts():
+    """Entry point for oslo.config-generator."""
+    # Deep-copy so callers cannot mutate the module-level option list.
+    return [(None, copy.deepcopy(memcache_opts))]
+
+
+def get_client(memcached_servers=None):
+ client_cls = Client
+
+ if not memcached_servers:
+ memcached_servers = CONF.memcached_servers
+ if memcached_servers:
+ import memcache
+ client_cls = memcache.Client
+
+ return client_cls(memcached_servers, debug=0)
+
+
+class Client(object):
+ """Replicates a tiny subset of memcached client interface."""
+
+ def __init__(self, *args, **kwargs):
+ """Ignores the passed in args."""
+ self.cache = {}
+
+ def get(self, key):
+ """Retrieves the value for a key or None.
+
+ This expunges expired keys during each get.
+ """
+
+ now = timeutils.utcnow_ts()
+ for k in list(self.cache):
+ (timeout, _value) = self.cache[k]
+ if timeout and now >= timeout:
+ del self.cache[k]
+
+ return self.cache.get(key, (0, None))[1]
+
+ def set(self, key, value, time=0, min_compress_len=0):
+ """Sets the value for a key."""
+ timeout = 0
+ if time != 0:
+ timeout = timeutils.utcnow_ts() + time
+ self.cache[key] = (timeout, value)
+ return True
+
+ def add(self, key, value, time=0, min_compress_len=0):
+ """Sets the value for a key if it doesn't exist."""
+ if self.get(key) is not None:
+ return False
+ return self.set(key, value, time, min_compress_len)
+
+ def incr(self, key, delta=1):
+ """Increments the value for a key."""
+ value = self.get(key)
+ if value is None:
+ return None
+ new_value = int(value) + delta
+ self.cache[key] = (self.cache[key][0], str(new_value))
+ return new_value
+
+ def delete(self, key, time=0):
+ """Deletes the value associated with a key."""
+ if key in self.cache:
+ del self.cache[key]
diff --git a/keystonemiddleware-moon/keystonemiddleware/opts.py b/keystonemiddleware-moon/keystonemiddleware/opts.py
new file mode 100644
index 00000000..62a7dabf
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/opts.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+__all__ = [
+ 'list_auth_token_opts',
+]
+
+import copy
+
+import keystonemiddleware.auth_token
+from keystonemiddleware.auth_token import _auth
+from keystonemiddleware.auth_token import _base
+
+auth_token_opts = [
+ (_base.AUTHTOKEN_GROUP,
+ keystonemiddleware.auth_token._OPTS +
+ _auth.AuthTokenPlugin.get_options())
+]
+
+
+def list_auth_token_opts():
+ """Return a list of oslo_config options available in auth_token middleware.
+
+ The returned list includes all oslo_config options which may be registered
+ at runtime by the project.
+
+ Each element of the list is a tuple. The first element is the name of the
+ group under which the list of elements in the second element will be
+ registered. A group name of None corresponds to the [DEFAULT] group in
+ config files.
+
+ This function is also discoverable via the entry point
+ 'keystonemiddleware.auth_token' under the 'oslo.config.opts'
+ namespace.
+
+ The purpose of this is to allow tools like the Oslo sample config file
+ generator to discover the options exposed to users by this middleware.
+
+ :returns: a list of (group_name, opts) tuples
+ """
+ return [(g, copy.deepcopy(o)) for g, o in auth_token_opts]
diff --git a/keystonemiddleware-moon/keystonemiddleware/s3_token.py b/keystonemiddleware-moon/keystonemiddleware/s3_token.py
new file mode 100644
index 00000000..d56482fd
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/s3_token.py
@@ -0,0 +1,267 @@
+# Copyright 2012 OpenStack Foundation
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# Copyright 2011,2012 Akira YOSHIYAMA <akirayoshiyama@gmail.com>
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# This source code is based ./auth_token.py and ./ec2_token.py.
+# See them for their copyright.
+
+"""
+S3 Token Middleware
+
+This WSGI component:
+
+* Gets a request from the swift3 middleware with an S3 Authorization
+ access key.
+* Validates s3 token in Keystone.
+* Transforms the account name to AUTH_%(tenant_name).
+
+"""
+
+import logging
+import webob
+
+from oslo_serialization import jsonutils
+import requests
+import six
+from six.moves import urllib
+
+from keystonemiddleware.i18n import _, _LI
+
+
+PROTOCOL_NAME = 'S3 Token Authentication'
+
+
+# TODO(kun): remove it after oslo merge this.
+def _split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
+ """Validate and split the given HTTP request path.
+
+ **Examples**::
+
+ ['a'] = _split_path('/a')
+ ['a', None] = _split_path('/a', 1, 2)
+ ['a', 'c'] = _split_path('/a/c', 1, 2)
+ ['a', 'c', 'o/r'] = _split_path('/a/c/o/r', 1, 3, True)
+
+ :param path: HTTP Request path to be split
+ :param minsegs: Minimum number of segments to be extracted
+ :param maxsegs: Maximum number of segments to be extracted
+ :param rest_with_last: If True, trailing data will be returned as part
+ of last segment. If False, and there is
+ trailing data, raises ValueError.
+ :returns: list of segments with a length of maxsegs (non-existent
+ segments will return as None)
+ :raises: ValueError if given an invalid path
+ """
+ if not maxsegs:
+ maxsegs = minsegs
+ if minsegs > maxsegs:
+ raise ValueError(_('minsegs > maxsegs: %(min)d > %(max)d)') %
+ {'min': minsegs, 'max': maxsegs})
+ if rest_with_last:
+ segs = path.split('/', maxsegs)
+ minsegs += 1
+ maxsegs += 1
+ count = len(segs)
+ if (segs[0] or count < minsegs or count > maxsegs or
+ '' in segs[1:minsegs]):
+ raise ValueError(_('Invalid path: %s') % urllib.parse.quote(path))
+ else:
+ minsegs += 1
+ maxsegs += 1
+ segs = path.split('/', maxsegs)
+ count = len(segs)
+ if (segs[0] or count < minsegs or count > maxsegs + 1 or
+ '' in segs[1:minsegs] or
+ (count == maxsegs + 1 and segs[maxsegs])):
+ raise ValueError(_('Invalid path: %s') % urllib.parse.quote(path))
+ segs = segs[1:maxsegs]
+ segs.extend([None] * (maxsegs - 1 - len(segs)))
+ return segs
+
+
+class ServiceError(Exception):
+ pass
+
+
+class S3Token(object):
+ """Middleware that handles S3 authentication."""
+
+ def __init__(self, app, conf):
+ """Common initialization code."""
+ self._app = app
+ self._logger = logging.getLogger(conf.get('log_name', __name__))
+ self._logger.debug('Starting the %s component', PROTOCOL_NAME)
+ self._reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
+ # where to find the auth service (we use this to validate tokens)
+
+ auth_host = conf.get('auth_host')
+ auth_port = int(conf.get('auth_port', 35357))
+ auth_protocol = conf.get('auth_protocol', 'https')
+
+ self._request_uri = '%s://%s:%s' % (auth_protocol, auth_host,
+ auth_port)
+
+ # SSL
+ insecure = conf.get('insecure', False)
+ cert_file = conf.get('certfile')
+ key_file = conf.get('keyfile')
+
+ if insecure:
+ self._verify = False
+ elif cert_file and key_file:
+ self._verify = (cert_file, key_file)
+ elif cert_file:
+ self._verify = cert_file
+ else:
+ self._verify = None
+
+ def _deny_request(self, code):
+ error_table = {
+ 'AccessDenied': (401, 'Access denied'),
+ 'InvalidURI': (400, 'Could not parse the specified URI'),
+ }
+ resp = webob.Response(content_type='text/xml')
+ resp.status = error_table[code][0]
+ error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
+ '<Error>\r\n <Code>%s</Code>\r\n '
+ '<Message>%s</Message>\r\n</Error>\r\n' %
+ (code, error_table[code][1]))
+ if six.PY3:
+ error_msg = error_msg.encode()
+ resp.body = error_msg
+ return resp
+
+ def _json_request(self, creds_json):
+ headers = {'Content-Type': 'application/json'}
+ try:
+ response = requests.post('%s/v2.0/s3tokens' % self._request_uri,
+ headers=headers, data=creds_json,
+ verify=self._verify)
+ except requests.exceptions.RequestException as e:
+ self._logger.info(_LI('HTTP connection exception: %s'), e)
+ resp = self._deny_request('InvalidURI')
+ raise ServiceError(resp)
+
+ if response.status_code < 200 or response.status_code >= 300:
+ self._logger.debug('Keystone reply error: status=%s reason=%s',
+ response.status_code, response.reason)
+ resp = self._deny_request('AccessDenied')
+ raise ServiceError(resp)
+
+ return response
+
+ def __call__(self, environ, start_response):
+ """Handle incoming request. authenticate and send downstream."""
+ req = webob.Request(environ)
+ self._logger.debug('Calling S3Token middleware.')
+
+ try:
+ parts = _split_path(req.path, 1, 4, True)
+ version, account, container, obj = parts
+ except ValueError:
+ msg = 'Not a path query, skipping.'
+ self._logger.debug(msg)
+ return self._app(environ, start_response)
+
+ # Read request signature and access id.
+ if 'Authorization' not in req.headers:
+ msg = 'No Authorization header. skipping.'
+ self._logger.debug(msg)
+ return self._app(environ, start_response)
+
+ token = req.headers.get('X-Auth-Token',
+ req.headers.get('X-Storage-Token'))
+ if not token:
+ msg = 'You did not specify an auth or a storage token. skipping.'
+ self._logger.debug(msg)
+ return self._app(environ, start_response)
+
+ auth_header = req.headers['Authorization']
+ try:
+ access, signature = auth_header.split(' ')[-1].rsplit(':', 1)
+ except ValueError:
+ msg = 'You have an invalid Authorization header: %s'
+ self._logger.debug(msg, auth_header)
+ return self._deny_request('InvalidURI')(environ, start_response)
+
+ # NOTE(chmou): This is to handle the special case with nova
+ # when we have the option s3_affix_tenant. We will force it to
+ # connect to another account than the one
+ # authenticated. Before people start getting worried about
+ # security, I should point that we are connecting with
+ # username/token specified by the user but instead of
+ # connecting to its own account we will force it to go to an
+ # another account. In a normal scenario if that user don't
+ # have the reseller right it will just fail but since the
+ # reseller account can connect to every account it is allowed
+ # by the swift_auth middleware.
+ force_tenant = None
+ if ':' in access:
+ access, force_tenant = access.split(':')
+
+ # Authenticate request.
+ creds = {'credentials': {'access': access,
+ 'token': token,
+ 'signature': signature}}
+ creds_json = jsonutils.dumps(creds)
+ self._logger.debug('Connecting to Keystone sending this JSON: %s',
+ creds_json)
+ # NOTE(vish): We could save a call to keystone by having
+ # keystone return token, tenant, user, and roles
+ # from this call.
+ #
+ # NOTE(chmou): We still have the same problem we would need to
+ # change token_auth to detect if we already
+ # identified and not doing a second query and just
+ # pass it through to swiftauth in this case.
+ try:
+ resp = self._json_request(creds_json)
+ except ServiceError as e:
+ resp = e.args[0]
+ msg = 'Received error, exiting middleware with error: %s'
+ self._logger.debug(msg, resp.status_code)
+ return resp(environ, start_response)
+
+ self._logger.debug('Keystone Reply: Status: %d, Output: %s',
+ resp.status_code, resp.content)
+
+ try:
+ identity_info = resp.json()
+ token_id = str(identity_info['access']['token']['id'])
+ tenant = identity_info['access']['token']['tenant']
+ except (ValueError, KeyError):
+ error = 'Error on keystone reply: %d %s'
+ self._logger.debug(error, resp.status_code, resp.content)
+ return self._deny_request('InvalidURI')(environ, start_response)
+
+ req.headers['X-Auth-Token'] = token_id
+ tenant_to_connect = force_tenant or tenant['id']
+ self._logger.debug('Connecting with tenant: %s', tenant_to_connect)
+ new_tenant_name = '%s%s' % (self._reseller_prefix, tenant_to_connect)
+ environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
+ new_tenant_name)
+ return self._app(environ, start_response)
+
+
+def filter_factory(global_conf, **local_conf):
+ """Returns a WSGI filter app for use with paste.deploy."""
+ conf = global_conf.copy()
+ conf.update(local_conf)
+
+ def auth_filter(app):
+ return S3Token(app, conf)
+ return auth_filter
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/__init__.py b/keystonemiddleware-moon/keystonemiddleware/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/__init__.py
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/__init__.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/__init__.py
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/__init__.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/__init__.py
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth.py
new file mode 100644
index 00000000..517d597b
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth.py
@@ -0,0 +1,102 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import uuid
+
+from keystoneclient import auth
+from keystoneclient import fixture
+from keystoneclient import session
+from requests_mock.contrib import fixture as rm_fixture
+import six
+import testtools
+
+from keystonemiddleware.auth_token import _auth
+
+
+class DefaultAuthPluginTests(testtools.TestCase):
+
+ def new_plugin(self, auth_host=None, auth_port=None, auth_protocol=None,
+ auth_admin_prefix=None, admin_user=None,
+ admin_password=None, admin_tenant_name=None,
+ admin_token=None, identity_uri=None, log=None):
+ if not log:
+ log = self.logger
+
+ return _auth.AuthTokenPlugin.load_from_options(
+ auth_host=auth_host,
+ auth_port=auth_port,
+ auth_protocol=auth_protocol,
+ auth_admin_prefix=auth_admin_prefix,
+ admin_user=admin_user,
+ admin_password=admin_password,
+ admin_tenant_name=admin_tenant_name,
+ admin_token=admin_token,
+ identity_uri=identity_uri,
+ log=log)
+
+ def setUp(self):
+ super(DefaultAuthPluginTests, self).setUp()
+
+ self.stream = six.StringIO()
+ self.logger = logging.getLogger(__name__)
+ self.session = session.Session()
+ self.requests = self.useFixture(rm_fixture.Fixture())
+
+ def test_auth_uri_from_fragments(self):
+ auth_protocol = 'http'
+ auth_host = 'testhost'
+ auth_port = 8888
+ auth_admin_prefix = 'admin'
+
+ expected = '%s://%s:%d/admin' % (auth_protocol, auth_host, auth_port)
+
+ plugin = self.new_plugin(auth_host=auth_host,
+ auth_protocol=auth_protocol,
+ auth_port=auth_port,
+ auth_admin_prefix=auth_admin_prefix)
+
+ self.assertEqual(expected,
+ plugin.get_endpoint(self.session,
+ interface=auth.AUTH_INTERFACE))
+
+ def test_identity_uri_overrides_fragments(self):
+ identity_uri = 'http://testhost:8888/admin'
+ plugin = self.new_plugin(identity_uri=identity_uri,
+ auth_host='anotherhost',
+ auth_port=9999,
+ auth_protocol='ftp')
+
+ self.assertEqual(identity_uri,
+ plugin.get_endpoint(self.session,
+ interface=auth.AUTH_INTERFACE))
+
+ def test_with_admin_token(self):
+ token = uuid.uuid4().hex
+ plugin = self.new_plugin(identity_uri='http://testhost:8888/admin',
+ admin_token=token)
+ self.assertEqual(token, plugin.get_token(self.session))
+
+ def test_with_user_pass(self):
+ base_uri = 'http://testhost:8888/admin'
+ token = fixture.V2Token()
+ admin_tenant_name = uuid.uuid4().hex
+
+ self.requests.post(base_uri + '/v2.0/tokens',
+ json=token)
+
+ plugin = self.new_plugin(identity_uri=base_uri,
+ admin_user=uuid.uuid4().hex,
+ admin_password=uuid.uuid4().hex,
+ admin_tenant_name=admin_tenant_name)
+
+ self.assertEqual(token.token_id, plugin.get_token(self.session))
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py
new file mode 100644
index 00000000..97fcc557
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py
@@ -0,0 +1,2763 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import calendar
+import datetime
+import json
+import logging
+import os
+import shutil
+import stat
+import tempfile
+import time
+import uuid
+
+import fixtures
+from keystoneclient import access
+from keystoneclient import auth
+from keystoneclient.common import cms
+from keystoneclient import exceptions
+from keystoneclient import fixture
+from keystoneclient import session
+import mock
+from oslo_config import fixture as cfg_fixture
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from requests_mock.contrib import fixture as rm_fixture
+import six
+import testresources
+import testtools
+from testtools import matchers
+import webob
+import webob.dec
+
+from keystonemiddleware import auth_token
+from keystonemiddleware.auth_token import _base
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.auth_token import _revocations
+from keystonemiddleware.openstack.common import memorycache
+from keystonemiddleware.tests.unit import client_fixtures
+from keystonemiddleware.tests.unit import utils
+
+
+EXPECTED_V2_DEFAULT_ENV_RESPONSE = {
+ 'HTTP_X_IDENTITY_STATUS': 'Confirmed',
+ 'HTTP_X_TENANT_ID': 'tenant_id1',
+ 'HTTP_X_TENANT_NAME': 'tenant_name1',
+ 'HTTP_X_USER_ID': 'user_id1',
+ 'HTTP_X_USER_NAME': 'user_name1',
+ 'HTTP_X_ROLES': 'role1,role2',
+ 'HTTP_X_USER': 'user_name1', # deprecated (diablo-compat)
+ 'HTTP_X_TENANT': 'tenant_name1', # deprecated (diablo-compat)
+ 'HTTP_X_ROLE': 'role1,role2', # deprecated (diablo-compat)
+}
+
+EXPECTED_V2_DEFAULT_SERVICE_ENV_RESPONSE = {
+ 'HTTP_X_SERVICE_IDENTITY_STATUS': 'Confirmed',
+ 'HTTP_X_SERVICE_PROJECT_ID': 'service_project_id1',
+ 'HTTP_X_SERVICE_PROJECT_NAME': 'service_project_name1',
+ 'HTTP_X_SERVICE_USER_ID': 'service_user_id1',
+ 'HTTP_X_SERVICE_USER_NAME': 'service_user_name1',
+ 'HTTP_X_SERVICE_ROLES': 'service_role1,service_role2',
+}
+
+EXPECTED_V3_DEFAULT_ENV_ADDITIONS = {
+ 'HTTP_X_PROJECT_DOMAIN_ID': 'domain_id1',
+ 'HTTP_X_PROJECT_DOMAIN_NAME': 'domain_name1',
+ 'HTTP_X_USER_DOMAIN_ID': 'domain_id1',
+ 'HTTP_X_USER_DOMAIN_NAME': 'domain_name1',
+}
+
+EXPECTED_V3_DEFAULT_SERVICE_ENV_ADDITIONS = {
+ 'HTTP_X_SERVICE_PROJECT_DOMAIN_ID': 'service_domain_id1',
+ 'HTTP_X_SERVICE_PROJECT_DOMAIN_NAME': 'service_domain_name1',
+ 'HTTP_X_SERVICE_USER_DOMAIN_ID': 'service_domain_id1',
+ 'HTTP_X_SERVICE_USER_DOMAIN_NAME': 'service_domain_name1'
+}
+
+
+BASE_HOST = 'https://keystone.example.com:1234'
+BASE_URI = '%s/testadmin' % BASE_HOST
+FAKE_ADMIN_TOKEN_ID = 'admin_token2'
+FAKE_ADMIN_TOKEN = jsonutils.dumps(
+ {'access': {'token': {'id': FAKE_ADMIN_TOKEN_ID,
+ 'expires': '2022-10-03T16:58:01Z'}}})
+
+VERSION_LIST_v3 = fixture.DiscoveryList(href=BASE_URI)
+VERSION_LIST_v2 = fixture.DiscoveryList(v3=False, href=BASE_URI)
+
+ERROR_TOKEN = '7ae290c2a06244c4b41692eb4e9225f2'
+MEMCACHED_SERVERS = ['localhost:11211']
+MEMCACHED_AVAILABLE = None
+
+
+def memcached_available():
+ """Do a sanity check against memcached.
+
+ Returns ``True`` if the following conditions are met (otherwise, returns
+ ``False``):
+
+ - ``python-memcached`` is installed
+ - a usable ``memcached`` instance is available via ``MEMCACHED_SERVERS``
+ - the client is able to set and get a key/value pair
+
+ """
+ global MEMCACHED_AVAILABLE
+
+ if MEMCACHED_AVAILABLE is None:
+ try:
+ import memcache
+ c = memcache.Client(MEMCACHED_SERVERS)
+ c.set('ping', 'pong', time=1)
+ MEMCACHED_AVAILABLE = c.get('ping') == 'pong'
+ except ImportError:
+ MEMCACHED_AVAILABLE = False
+
+ return MEMCACHED_AVAILABLE
+
+
+def cleanup_revoked_file(filename):
+ try:
+ os.remove(filename)
+ except OSError:
+ pass
+
+
+class TimezoneFixture(fixtures.Fixture):
+ @staticmethod
+ def supported():
+ # tzset is only supported on Unix.
+ return hasattr(time, 'tzset')
+
+ def __init__(self, new_tz):
+ super(TimezoneFixture, self).__init__()
+ self.tz = new_tz
+ self.old_tz = os.environ.get('TZ')
+
+ def setUp(self):
+ super(TimezoneFixture, self).setUp()
+ if not self.supported():
+ raise NotImplementedError('timezone override is not supported.')
+ os.environ['TZ'] = self.tz
+ time.tzset()
+ self.addCleanup(self.cleanup)
+
+ def cleanup(self):
+ if self.old_tz is not None:
+ os.environ['TZ'] = self.old_tz
+ elif 'TZ' in os.environ:
+ del os.environ['TZ']
+ time.tzset()
+
+
+class TimeFixture(fixtures.Fixture):
+
+ def __init__(self, new_time, normalize=True):
+ super(TimeFixture, self).__init__()
+ if isinstance(new_time, six.string_types):
+ new_time = timeutils.parse_isotime(new_time)
+ if normalize:
+ new_time = timeutils.normalize_time(new_time)
+ self.new_time = new_time
+
+ def setUp(self):
+ super(TimeFixture, self).setUp()
+ timeutils.set_time_override(self.new_time)
+ self.addCleanup(timeutils.clear_time_override)
+
+
+class FakeApp(object):
+ """This represents a WSGI app protected by the auth_token middleware."""
+
+ SUCCESS = b'SUCCESS'
+ FORBIDDEN = b'FORBIDDEN'
+ expected_env = {}
+
+ def __init__(self, expected_env=None, need_service_token=False):
+ self.expected_env = dict(EXPECTED_V2_DEFAULT_ENV_RESPONSE)
+
+ if expected_env:
+ self.expected_env.update(expected_env)
+
+ self.need_service_token = need_service_token
+
+ def __call__(self, env, start_response):
+ for k, v in self.expected_env.items():
+ assert env[k] == v, '%s != %s' % (env[k], v)
+
+ resp = webob.Response()
+
+ if (env.get('HTTP_X_IDENTITY_STATUS') == 'Invalid'
+ and env['HTTP_X_SERVICE_IDENTITY_STATUS'] == 'Invalid'):
+ # Simulate delayed auth forbidding access with arbitrary status
+ # code to differentiate checking this code path
+ resp.status = 419
+ resp.body = FakeApp.FORBIDDEN
+ elif env.get('HTTP_X_SERVICE_IDENTITY_STATUS') == 'Invalid':
+ # Simulate delayed auth forbidding access with arbitrary status
+ # code to differentiate checking this code path
+ resp.status = 420
+ resp.body = FakeApp.FORBIDDEN
+ elif env['HTTP_X_IDENTITY_STATUS'] == 'Invalid':
+ # Simulate delayed auth forbidding access
+ resp.status = 403
+ resp.body = FakeApp.FORBIDDEN
+ elif (self.need_service_token is True and
+ env.get('HTTP_X_SERVICE_TOKEN') is None):
+ # Simulate requiring composite auth
+ # Arbitrary value to allow checking this code path
+ resp.status = 418
+ resp.body = FakeApp.FORBIDDEN
+ else:
+ resp.body = FakeApp.SUCCESS
+
+ return resp(env, start_response)
+
+
+class v3FakeApp(FakeApp):
+ """This represents a v3 WSGI app protected by the auth_token middleware."""
+
+ def __init__(self, expected_env=None, need_service_token=False):
+
+ # with v3 additions, these are for the DEFAULT TOKEN
+ v3_default_env_additions = dict(EXPECTED_V3_DEFAULT_ENV_ADDITIONS)
+ if expected_env:
+ v3_default_env_additions.update(expected_env)
+ super(v3FakeApp, self).__init__(expected_env=v3_default_env_additions,
+ need_service_token=need_service_token)
+
+
+class CompositeBase(object):
+ """Base composite auth object with common service token environment."""
+
+ def __init__(self, expected_env=None):
+ comp_expected_env = dict(EXPECTED_V2_DEFAULT_SERVICE_ENV_RESPONSE)
+
+ if expected_env:
+ comp_expected_env.update(expected_env)
+
+ super(CompositeBase, self).__init__(
+ expected_env=comp_expected_env, need_service_token=True)
+
+
+class CompositeFakeApp(CompositeBase, FakeApp):
+ """A fake v2 WSGI app protected by composite auth_token middleware."""
+
+ def __init__(self, expected_env):
+ super(CompositeFakeApp, self).__init__(expected_env=expected_env)
+
+
+class v3CompositeFakeApp(CompositeBase, v3FakeApp):
+ """A fake v3 WSGI app protected by composite auth_token middleware."""
+
+ def __init__(self, expected_env=None):
+
+ # with v3 additions, these are for the DEFAULT SERVICE TOKEN
+ v3_default_service_env_additions = dict(
+ EXPECTED_V3_DEFAULT_SERVICE_ENV_ADDITIONS)
+
+ if expected_env:
+ v3_default_service_env_additions.update(expected_env)
+
+ super(v3CompositeFakeApp, self).__init__(
+ v3_default_service_env_additions)
+
+
+def new_app(status, body, headers={}):
+
+ class _App(object):
+
+ def __init__(self, expected_env=None):
+ self.expected_env = expected_env
+
+ @webob.dec.wsgify
+ def __call__(self, req):
+ resp = webob.Response(body, status)
+ resp.headers.update(headers)
+ return resp
+
+ return _App
+
+
+class BaseAuthTokenMiddlewareTest(testtools.TestCase):
+ """Base test class for auth_token middleware.
+
+ All the tests allow for running with auth_token
+ configured for receiving v2 or v3 tokens, with the
+ choice being made by passing configuration data into
+ setUp().
+
+ The base class will, by default, run all the tests
+ expecting v2 token formats. Child classes can override
+ this to specify, for instance, v3 format.
+
+ """
+ def setUp(self, expected_env=None, auth_version=None, fake_app=None):
+ super(BaseAuthTokenMiddlewareTest, self).setUp()
+
+ self.expected_env = expected_env or dict()
+ self.fake_app = fake_app or FakeApp
+ self.middleware = None
+ self.requests = self.useFixture(rm_fixture.Fixture())
+
+ signing_dir = self._setup_signing_directory()
+
+ self.conf = {
+ 'identity_uri': 'https://keystone.example.com:1234/testadmin/',
+ 'signing_dir': signing_dir,
+ 'auth_version': auth_version,
+ 'auth_uri': 'https://keystone.example.com:1234',
+ 'admin_user': uuid.uuid4().hex,
+ }
+
+ self.auth_version = auth_version
+ self.response_status = None
+ self.response_headers = None
+
+ def _setup_signing_directory(self):
+ directory_name = self.useFixture(fixtures.TempDir()).path
+
+ # Copy the sample certificate files into the temporary directory.
+ for filename in ['cacert.pem', 'signing_cert.pem', ]:
+ shutil.copy2(os.path.join(client_fixtures.CERTDIR, filename),
+ os.path.join(directory_name, filename))
+
+ return directory_name
+
+ def set_middleware(self, expected_env=None, conf=None):
+ """Configure the class ready to call the auth_token middleware.
+
+ Set up the various fake items needed to run the middleware.
+ Individual tests that need to further refine these can call this
+ function to override the class defaults.
+
+ """
+ if conf:
+ self.conf.update(conf)
+
+ if expected_env:
+ self.expected_env.update(expected_env)
+
+ self.middleware = auth_token.AuthProtocol(
+ self.fake_app(self.expected_env), self.conf)
+
+ self.middleware._revocations._list = jsonutils.dumps(
+ {"revoked": [], "extra": "success"})
+
+ def update_expected_env(self, expected_env={}):
+ self.middleware._app.expected_env.update(expected_env)
+
+ def purge_token_expected_env(self):
+ for key in six.iterkeys(self.token_expected_env):
+ del self.middleware._app.expected_env[key]
+
+ def purge_service_token_expected_env(self):
+ for key in six.iterkeys(self.service_token_expected_env):
+ del self.middleware._app.expected_env[key]
+
+ def start_fake_response(self, status, headers, exc_info=None):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+
+ def assertLastPath(self, path):
+ if path:
+ self.assertEqual(BASE_URI + path, self.requests.last_request.url)
+ else:
+ self.assertIsNone(self.requests.last_request)
+
+
+class DiabloAuthTokenMiddlewareTest(BaseAuthTokenMiddlewareTest,
+ testresources.ResourcedTestCase):
+
+ resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]
+
+ """Auth Token middleware should understand Diablo keystone responses."""
+ def setUp(self):
+ # pre-diablo only had Tenant ID, which was also the Name
+ expected_env = {
+ 'HTTP_X_TENANT_ID': 'tenant_id1',
+ 'HTTP_X_TENANT_NAME': 'tenant_id1',
+ # now deprecated (diablo-compat)
+ 'HTTP_X_TENANT': 'tenant_id1',
+ }
+
+ super(DiabloAuthTokenMiddlewareTest, self).setUp(
+ expected_env=expected_env)
+
+ self.requests.get(BASE_URI,
+ json=VERSION_LIST_v2,
+ status_code=300)
+
+ self.requests.post("%s/v2.0/tokens" % BASE_URI,
+ text=FAKE_ADMIN_TOKEN)
+
+ self.token_id = self.examples.VALID_DIABLO_TOKEN
+ token_response = self.examples.JSON_TOKEN_RESPONSES[self.token_id]
+
+ url = "%s/v2.0/tokens/%s" % (BASE_URI, self.token_id)
+ self.requests.get(url, text=token_response)
+
+ self.set_middleware()
+
+ def test_valid_diablo_response(self):
+ req = webob.Request.blank('/')
+ req.headers['X-Auth-Token'] = self.token_id
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+ self.assertIn('keystone.token_info', req.environ)
+
+
+class NoMemcacheAuthToken(BaseAuthTokenMiddlewareTest):
+ """These tests will not have the memcache module available."""
+
+ def setUp(self):
+ super(NoMemcacheAuthToken, self).setUp()
+ self.useFixture(utils.DisableModuleFixture('memcache'))
+
+ def test_nomemcache(self):
+ conf = {
+ 'admin_token': 'admin_token1',
+ 'auth_host': 'keystone.example.com',
+ 'auth_port': '1234',
+ 'memcached_servers': ','.join(MEMCACHED_SERVERS),
+ 'auth_uri': 'https://keystone.example.com:1234',
+ }
+
+ auth_token.AuthProtocol(FakeApp(), conf)
+
+
+class CachePoolTest(BaseAuthTokenMiddlewareTest):
+ def test_use_cache_from_env(self):
+ """If `swift.cache` is set in the environment and `cache` is set in the
+ config then the env cache is used.
+ """
+ env = {'swift.cache': 'CACHE_TEST'}
+ conf = {
+ 'cache': 'swift.cache'
+ }
+ self.set_middleware(conf=conf)
+ self.middleware._token_cache.initialize(env)
+ with self.middleware._token_cache._cache_pool.reserve() as cache:
+ self.assertEqual(cache, 'CACHE_TEST')
+
+ def test_not_use_cache_from_env(self):
+ """If `swift.cache` is set in the environment but `cache` isn't set in
+ the config then the env cache isn't used.
+ """
+ self.set_middleware()
+ env = {'swift.cache': 'CACHE_TEST'}
+ self.middleware._token_cache.initialize(env)
+ with self.middleware._token_cache._cache_pool.reserve() as cache:
+ self.assertNotEqual(cache, 'CACHE_TEST')
+
+ def test_multiple_context_managers_share_single_client(self):
+ self.set_middleware()
+ token_cache = self.middleware._token_cache
+ env = {}
+ token_cache.initialize(env)
+
+ caches = []
+
+ with token_cache._cache_pool.reserve() as cache:
+ caches.append(cache)
+
+ with token_cache._cache_pool.reserve() as cache:
+ caches.append(cache)
+
+ self.assertIs(caches[0], caches[1])
+ self.assertEqual(set(caches), set(token_cache._cache_pool))
+
+ def test_nested_context_managers_create_multiple_clients(self):
+ self.set_middleware()
+ env = {}
+ self.middleware._token_cache.initialize(env)
+ token_cache = self.middleware._token_cache
+
+ with token_cache._cache_pool.reserve() as outer_cache:
+ with token_cache._cache_pool.reserve() as inner_cache:
+ self.assertNotEqual(outer_cache, inner_cache)
+
+ self.assertEqual(
+ set([inner_cache, outer_cache]),
+ set(token_cache._cache_pool))
+
+
class GeneralAuthTokenMiddlewareTest(BaseAuthTokenMiddlewareTest,
                                     testresources.ResourcedTestCase):
    """Tests that are not affected by the token format.

    See CommonAuthTokenMiddlewareTest for the tests that are run once
    with v2 tokens and again with v3 tokens.
    """

    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]

    def test_token_is_v2_accepts_v2(self):
        token = self.examples.UUID_TOKEN_DEFAULT
        token_response = self.examples.TOKEN_RESPONSES[token]
        self.assertTrue(auth_token._token_is_v2(token_response))

    def test_token_is_v2_rejects_v3(self):
        token = self.examples.v3_UUID_TOKEN_DEFAULT
        token_response = self.examples.TOKEN_RESPONSES[token]
        self.assertFalse(auth_token._token_is_v2(token_response))

    def test_token_is_v3_rejects_v2(self):
        token = self.examples.UUID_TOKEN_DEFAULT
        token_response = self.examples.TOKEN_RESPONSES[token]
        self.assertFalse(auth_token._token_is_v3(token_response))

    def test_token_is_v3_accepts_v3(self):
        token = self.examples.v3_UUID_TOKEN_DEFAULT
        token_response = self.examples.TOKEN_RESPONSES[token]
        self.assertTrue(auth_token._token_is_v3(token_response))

    @testtools.skipUnless(memcached_available(), 'memcached not available')
    def test_encrypt_cache_data(self):
        # Round-trip a cache entry through the 'encrypt' protection
        # strategy and check the stored data comes back intact.
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'encrypt',
            'memcache_secret_key': 'mysecret'
        }
        self.set_middleware(conf=conf)
        token = b'my_token'
        some_time_later = timeutils.utcnow() + datetime.timedelta(hours=4)
        expires = timeutils.strtime(some_time_later)
        data = ('this_data', expires)
        token_cache = self.middleware._token_cache
        token_cache.initialize({})
        token_cache._cache_store(token, data)
        self.assertEqual(token_cache._cache_get(token), data[0])

    @testtools.skipUnless(memcached_available(), 'memcached not available')
    def test_sign_cache_data(self):
        # Same round-trip, with the 'mac' (signing) strategy.
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'mac',
            'memcache_secret_key': 'mysecret'
        }
        self.set_middleware(conf=conf)
        token = b'my_token'
        some_time_later = timeutils.utcnow() + datetime.timedelta(hours=4)
        expires = timeutils.strtime(some_time_later)
        data = ('this_data', expires)
        token_cache = self.middleware._token_cache
        token_cache.initialize({})
        token_cache._cache_store(token, data)
        self.assertEqual(token_cache._cache_get(token), data[0])

    @testtools.skipUnless(memcached_available(), 'memcached not available')
    def test_no_memcache_protection(self):
        # No memcache_security_strategy configured: entries are stored
        # unprotected and still round-trip.
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_secret_key': 'mysecret'
        }
        self.set_middleware(conf=conf)
        token = 'my_token'
        some_time_later = timeutils.utcnow() + datetime.timedelta(hours=4)
        expires = timeutils.strtime(some_time_later)
        data = ('this_data', expires)
        token_cache = self.middleware._token_cache
        token_cache.initialize({})
        token_cache._cache_store(token, data)
        self.assertEqual(token_cache._cache_get(token), data[0])

    def test_assert_valid_memcache_protection_config(self):
        # test missing memcache_secret_key with the 'encrypt' strategy
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'Encrypt'
        }
        self.assertRaises(exc.ConfigurationError, self.set_middleware,
                          conf=conf)
        # test invalid memcache_security_strategy
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'whatever'
        }
        self.assertRaises(exc.ConfigurationError, self.set_middleware,
                          conf=conf)
        # test missing memcache_secret_key with the 'mac' strategy
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'mac'
        }
        self.assertRaises(exc.ConfigurationError, self.set_middleware,
                          conf=conf)
        # an empty memcache_secret_key is also rejected
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'Encrypt',
            'memcache_secret_key': ''
        }
        self.assertRaises(exc.ConfigurationError, self.set_middleware,
                          conf=conf)
        conf = {
            'memcached_servers': ','.join(MEMCACHED_SERVERS),
            'memcache_security_strategy': 'mAc',
            'memcache_secret_key': ''
        }
        self.assertRaises(exc.ConfigurationError, self.set_middleware,
                          conf=conf)

    def test_config_revocation_cache_timeout(self):
        conf = {
            'revocation_cache_time': '24',
            'auth_uri': 'https://keystone.example.com:1234',
            'admin_user': uuid.uuid4().hex
        }
        middleware = auth_token.AuthProtocol(self.fake_app, conf)
        # revocation_cache_time is interpreted as seconds
        self.assertEqual(middleware._revocations._cache_timeout,
                         datetime.timedelta(seconds=24))

    def test_conf_values_type_convert(self):
        conf = {
            'revocation_cache_time': '24',
            'identity_uri': 'https://keystone.example.com:1234',
            'include_service_catalog': '0',
            'nonexsit_option': '0',
        }

        middleware = auth_token.AuthProtocol(self.fake_app, conf)
        self.assertEqual(datetime.timedelta(seconds=24),
                         middleware._revocations._cache_timeout)
        self.assertEqual(False, middleware._include_service_catalog)
        # unknown options are kept verbatim as strings
        self.assertEqual('0', middleware._conf['nonexsit_option'])

    def test_deprecated_conf_values(self):
        # 'memcache_servers' is the old spelling; it must resolve to the
        # same value as 'memcached_servers'.
        conf = {
            'memcache_servers': ','.join(MEMCACHED_SERVERS),
        }

        middleware = auth_token.AuthProtocol(self.fake_app, conf)
        self.assertEqual(MEMCACHED_SERVERS,
                         middleware._conf_get('memcached_servers'))

    def test_conf_values_type_convert_with_wrong_value(self):
        # '123' is not a valid boolean for include_service_catalog
        conf = {
            'include_service_catalog': '123',
        }
        self.assertRaises(exc.ConfigurationError,
                          auth_token.AuthProtocol, self.fake_app, conf)
+
+
class CommonAuthTokenMiddlewareTest(object):
    """Tests that are run once using v2 tokens and again using v3 tokens.

    Mixed into format-specific test classes; ``self.token_dict`` maps
    logical token names to concrete tokens for the format under test.
    """

    def test_init_does_not_call_http(self):
        conf = {
            'revocation_cache_time': '1'
        }
        self.set_middleware(conf=conf)
        self.assertLastPath(None)

    def test_auth_with_no_token_does_not_call_http(self):
        self.set_middleware()
        req = webob.Request.blank('/')
        self.middleware(req.environ, self.start_fake_response)
        self.assertLastPath(None)
        self.assertEqual(401, self.response_status)

    def test_init_by_ipv6Addr_auth_host(self):
        del self.conf['identity_uri']
        conf = {
            'auth_host': '2001:2013:1:f101::1',
            'auth_port': '1234',
            'auth_protocol': 'http',
            'auth_uri': None,
            'auth_version': 'v3.0',
        }
        self.set_middleware(conf=conf)
        # IPv6 literals must be bracketed when composing the URI
        expected_auth_uri = 'http://[2001:2013:1:f101::1]:1234'
        self.assertEqual(expected_auth_uri,
                         self.middleware._auth_uri)

    def assert_valid_request_200(self, token, with_catalog=True):
        """Send *token* through the middleware and expect a 200.

        Returns the request so callers can make further assertions.
        """
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token
        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        if with_catalog:
            self.assertTrue(req.headers.get('X-Service-Catalog'))
        else:
            self.assertNotIn('X-Service-Catalog', req.headers)
        self.assertEqual(body, [FakeApp.SUCCESS])
        self.assertIn('keystone.token_info', req.environ)
        return req

    def test_valid_uuid_request(self):
        for _ in range(2):  # Do it twice because first result was cached.
            token = self.token_dict['uuid_token_default']
            self.assert_valid_request_200(token)
            self.assert_valid_last_url(token)

    def test_valid_uuid_request_with_auth_fragments(self):
        # Build the identity endpoint from its component options instead
        # of identity_uri.
        del self.conf['identity_uri']
        self.conf['auth_protocol'] = 'https'
        self.conf['auth_host'] = 'keystone.example.com'
        self.conf['auth_port'] = '1234'
        self.conf['auth_admin_prefix'] = '/testadmin'
        self.set_middleware()
        self.assert_valid_request_200(self.token_dict['uuid_token_default'])
        self.assert_valid_last_url(self.token_dict['uuid_token_default'])

    def _test_cache_revoked(self, token, revoked_form=None):
        # When the token is cached and revoked, 401 is returned.
        self.middleware._check_revocations_for_cached = True

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token

        # Token should be cached as ok after this.
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(200, self.response_status)

        # Put it in revocation list.
        self.middleware._revocations._list = self.get_revocation_list_json(
            token_ids=[revoked_form or token])
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(401, self.response_status)

    def test_cached_revoked_uuid(self):
        # When the UUID token is cached and revoked, 401 is returned.
        self._test_cache_revoked(self.token_dict['uuid_token_default'])

    def test_valid_signed_request(self):
        for _ in range(2):  # Do it twice because first result was cached.
            self.assert_valid_request_200(
                self.token_dict['signed_token_scoped'])
            # ensure that signed requests do not generate HTTP traffic
            self.assertLastPath(None)

    def test_valid_signed_compressed_request(self):
        self.assert_valid_request_200(
            self.token_dict['signed_token_scoped_pkiz'])
        # ensure that signed requests do not generate HTTP traffic
        self.assertLastPath(None)

    def test_revoked_token_receives_401(self):
        self.middleware._revocations._list = (
            self.get_revocation_list_json())
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.token_dict['revoked_token']
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)

    def test_revoked_token_receives_401_sha256(self):
        self.conf['hash_algorithms'] = ','.join(['sha256', 'md5'])
        self.set_middleware()
        self.middleware._revocations._list = (
            self.get_revocation_list_json(mode='sha256'))
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.token_dict['revoked_token']
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)

    def test_cached_revoked_pki(self):
        # When the PKI token is cached and revoked, 401 is returned.
        # PKI tokens appear in the revocation list by their CMS hash.
        token = self.token_dict['signed_token_scoped']
        revoked_form = cms.cms_hash_token(token)
        self._test_cache_revoked(token, revoked_form)

    def test_cached_revoked_pkiz(self):
        # When the PKIZ token is cached and revoked, 401 is returned.
        token = self.token_dict['signed_token_scoped_pkiz']
        revoked_form = cms.cms_hash_token(token)
        self._test_cache_revoked(token, revoked_form)

    def test_revoked_token_receives_401_md5_secondary(self):
        # When hash_algorithms has 'md5' as the secondary hash and the
        # revocation list contains the md5 hash for a token, that token is
        # considered revoked so returns 401.
        self.conf['hash_algorithms'] = ','.join(['sha256', 'md5'])
        self.set_middleware()
        self.middleware._revocations._list = (
            self.get_revocation_list_json())
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.token_dict['revoked_token']
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)

    def _test_revoked_hashed_token(self, token_name):
        # If hash_algorithms is set as ['sha256', 'md5'],
        # and check_revocations_for_cached is True,
        # and a token is in the cache because it was successfully validated
        # using the md5 hash, then
        # if the token is in the revocation list by md5 hash, it'll be
        # rejected and auth_token returns 401.
        self.conf['hash_algorithms'] = ','.join(['sha256', 'md5'])
        self.conf['check_revocations_for_cached'] = 'true'
        self.set_middleware()

        token = self.token_dict[token_name]

        # Put the token in the revocation list.
        token_hashed = cms.cms_hash_token(token)
        self.middleware._revocations._list = self.get_revocation_list_json(
            token_ids=[token_hashed])

        # First, request is using the hashed token, is valid so goes in
        # cache using the given hash.
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token_hashed
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(200, self.response_status)

        # This time use the PKI(Z) token
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)

        # Should find the token in the cache and revocation list.
        self.assertEqual(401, self.response_status)

    def test_revoked_hashed_pki_token(self):
        self._test_revoked_hashed_token('signed_token_scoped')

    def test_revoked_hashed_pkiz_token(self):
        self._test_revoked_hashed_token('signed_token_scoped_pkiz')
+
+ def get_revocation_list_json(self, token_ids=None, mode=None):
+ if token_ids is None:
+ key = 'revoked_token_hash' + (('_' + mode) if mode else '')
+ token_ids = [self.token_dict[key]]
+ revocation_list = {'revoked': [{'id': x, 'expires': timeutils.utcnow()}
+ for x in token_ids]}
+ return jsonutils.dumps(revocation_list)
+
    def test_is_signed_token_revoked_returns_false(self):
        # explicitly setting an empty revocation list here to document intent
        self.middleware._revocations._list = jsonutils.dumps(
            {"revoked": [], "extra": "success"})
        result = self.middleware._revocations._any_revoked(
            [self.token_dict['revoked_token_hash']])
        self.assertFalse(result)

    def test_is_signed_token_revoked_returns_true(self):
        self.middleware._revocations._list = (
            self.get_revocation_list_json())
        result = self.middleware._revocations._any_revoked(
            [self.token_dict['revoked_token_hash']])
        self.assertTrue(result)

    def test_is_signed_token_revoked_returns_true_sha256(self):
        self.conf['hash_algorithms'] = ','.join(['sha256', 'md5'])
        self.set_middleware()
        self.middleware._revocations._list = (
            self.get_revocation_list_json(mode='sha256'))
        result = self.middleware._revocations._any_revoked(
            [self.token_dict['revoked_token_hash_sha256']])
        self.assertTrue(result)

    def test_verify_signed_token_raises_exception_for_revoked_token(self):
        self.middleware._revocations._list = (
            self.get_revocation_list_json())
        self.assertRaises(exc.InvalidToken,
                          self.middleware._verify_signed_token,
                          self.token_dict['revoked_token'],
                          [self.token_dict['revoked_token_hash']])

    def test_verify_signed_token_raises_exception_for_revoked_token_s256(self):
        self.conf['hash_algorithms'] = ','.join(['sha256', 'md5'])
        self.set_middleware()
        self.middleware._revocations._list = (
            self.get_revocation_list_json(mode='sha256'))
        self.assertRaises(exc.InvalidToken,
                          self.middleware._verify_signed_token,
                          self.token_dict['revoked_token'],
                          [self.token_dict['revoked_token_hash_sha256'],
                           self.token_dict['revoked_token_hash']])

    def test_verify_signed_token_raises_exception_for_revoked_pkiz_token(self):
        self.middleware._revocations._list = (
            self.examples.REVOKED_TOKEN_PKIZ_LIST_JSON)
        self.assertRaises(exc.InvalidToken,
                          self.middleware._verify_pkiz_token,
                          self.token_dict['revoked_token_pkiz'],
                          [self.token_dict['revoked_token_pkiz_hash']])

    def assertIsValidJSON(self, text):
        # Raises ValueError if *text* is not parseable JSON.
        json.loads(text)

    def test_verify_signed_token_succeeds_for_unrevoked_token(self):
        self.middleware._revocations._list = (
            self.get_revocation_list_json())
        text = self.middleware._verify_signed_token(
            self.token_dict['signed_token_scoped'],
            [self.token_dict['signed_token_scoped_hash']])
        self.assertIsValidJSON(text)

    def test_verify_signed_compressed_token_succeeds_for_unrevoked_token(self):
        self.middleware._revocations._list = (
            self.get_revocation_list_json())
        text = self.middleware._verify_pkiz_token(
            self.token_dict['signed_token_scoped_pkiz'],
            [self.token_dict['signed_token_scoped_hash']])
        self.assertIsValidJSON(text)

    def test_verify_signed_token_succeeds_for_unrevoked_token_sha256(self):
        self.conf['hash_algorithms'] = ','.join(['sha256', 'md5'])
        self.set_middleware()
        self.middleware._revocations._list = (
            self.get_revocation_list_json(mode='sha256'))
        text = self.middleware._verify_signed_token(
            self.token_dict['signed_token_scoped'],
            [self.token_dict['signed_token_scoped_hash_sha256'],
             self.token_dict['signed_token_scoped_hash']])
        self.assertIsValidJSON(text)

    def test_get_token_revocation_list_fetched_time_returns_min(self):
        self.middleware._revocations._fetched_time = None

        # Get rid of the revoked file
        revoked_path = self.middleware._signing_directory.calc_path(
            _revocations.Revocations._FILE_NAME)
        os.remove(revoked_path)

        # With no cached time and no file on disk, datetime.min is used.
        self.assertEqual(self.middleware._revocations._fetched_time,
                         datetime.datetime.min)

    # FIXME(blk-u): move the unit tests into unit/test_auth_token.py
    def test_get_token_revocation_list_fetched_time_returns_mtime(self):
        self.middleware._revocations._fetched_time = None
        revoked_path = self.middleware._signing_directory.calc_path(
            _revocations.Revocations._FILE_NAME)
        mtime = os.path.getmtime(revoked_path)
        fetched_time = datetime.datetime.utcfromtimestamp(mtime)
        self.assertEqual(fetched_time,
                         self.middleware._revocations._fetched_time)

    @testtools.skipUnless(TimezoneFixture.supported(),
                          'TimezoneFixture not supported')
    def test_get_token_revocation_list_fetched_time_returns_utc(self):
        with TimezoneFixture('UTC-1'):
            self.middleware._revocations._list = jsonutils.dumps(
                self.examples.REVOCATION_LIST)
            self.middleware._revocations._fetched_time = None
            fetched_time = self.middleware._revocations._fetched_time
            self.assertTrue(timeutils.is_soon(fetched_time, 1))

    def test_get_token_revocation_list_fetched_time_returns_value(self):
        expected = self.middleware._revocations._fetched_time
        self.assertEqual(self.middleware._revocations._fetched_time,
                         expected)

    def test_get_revocation_list_returns_fetched_list(self):
        # auth_token uses v2 to fetch this, so don't allow the v3
        # tests to override the fake http connection
        self.middleware._revocations._fetched_time = None

        # Get rid of the revoked file
        revoked_path = self.middleware._signing_directory.calc_path(
            _revocations.Revocations._FILE_NAME)
        os.remove(revoked_path)

        self.assertEqual(self.middleware._revocations._list,
                         self.examples.REVOCATION_LIST)

    def test_get_revocation_list_returns_current_list_from_memory(self):
        self.assertEqual(self.middleware._revocations._list,
                         self.middleware._revocations._list_prop)

    def test_get_revocation_list_returns_current_list_from_disk(self):
        in_memory_list = self.middleware._revocations._list
        self.middleware._revocations._list_prop = None
        self.assertEqual(self.middleware._revocations._list,
                         in_memory_list)

    def test_invalid_revocation_list_raises_error(self):
        # A response without a 'revoked' key is malformed.
        self.requests.get('%s/v2.0/tokens/revoked' % BASE_URI, json={})

        self.assertRaises(exc.RevocationListError,
                          self.middleware._revocations._fetch)

    def test_fetch_revocation_list(self):
        # auth_token uses v2 to fetch this, so don't allow the v3
        # tests to override the fake http connection
        fetched = jsonutils.loads(self.middleware._revocations._fetch())
        self.assertEqual(fetched, self.examples.REVOCATION_LIST)

    def test_request_invalid_uuid_token(self):
        # remember because we are testing the middleware we stub the connection
        # to the keystone server, but this is not what gets returned
        invalid_uri = "%s/v2.0/tokens/invalid-token" % BASE_URI
        self.requests.get(invalid_uri, status_code=404)

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = 'invalid-token'
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)
        self.assertEqual(self.response_headers['WWW-Authenticate'],
                         "Keystone uri='https://keystone.example.com:1234'")

    def test_request_invalid_signed_token(self):
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.examples.INVALID_SIGNED_TOKEN
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(401, self.response_status)
        self.assertEqual("Keystone uri='https://keystone.example.com:1234'",
                         self.response_headers['WWW-Authenticate'])

    def test_request_invalid_signed_pkiz_token(self):
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.examples.INVALID_SIGNED_PKIZ_TOKEN
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(401, self.response_status)
        self.assertEqual("Keystone uri='https://keystone.example.com:1234'",
                         self.response_headers['WWW-Authenticate'])

    def test_request_no_token(self):
        req = webob.Request.blank('/')
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)
        self.assertEqual(self.response_headers['WWW-Authenticate'],
                         "Keystone uri='https://keystone.example.com:1234'")

    def test_request_no_token_log_message(self):
        # Minimal logger stub recording the last warn/debug messages.
        class FakeLog(object):
            def __init__(self):
                self.msg = None
                self.debugmsg = None

            def warn(self, msg=None, *args, **kwargs):
                self.msg = msg

            def debug(self, msg=None, *args, **kwargs):
                self.debugmsg = msg

        self.middleware._LOG = FakeLog()
        self.middleware._delay_auth_decision = False
        self.assertRaises(exc.InvalidToken,
                          self.middleware._get_user_token_from_header, {})
        # Both a warning and a debug message must have been logged.
        self.assertIsNotNone(self.middleware._LOG.msg)
        self.assertIsNotNone(self.middleware._LOG.debugmsg)

    def test_request_no_token_http(self):
        req = webob.Request.blank('/', environ={'REQUEST_METHOD': 'HEAD'})
        self.set_middleware()
        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)
        self.assertEqual(self.response_headers['WWW-Authenticate'],
                         "Keystone uri='https://keystone.example.com:1234'")
        self.assertEqual(body, [''])

    def test_request_blank_token(self):
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = ''
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)
        self.assertEqual(self.response_headers['WWW-Authenticate'],
                         "Keystone uri='https://keystone.example.com:1234'")

    def _get_cached_token(self, token, mode='md5'):
        # Look the token up in the middleware cache by its hashed form.
        token_id = cms.cms_hash_token(token, mode=mode)
        return self.middleware._token_cache._cache_get(token_id)

    def test_memcache(self):
        req = webob.Request.blank('/')
        token = self.token_dict['signed_token_scoped']
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)
        self.assertIsNotNone(self._get_cached_token(token))

    def test_expired(self):
        req = webob.Request.blank('/')
        token = self.token_dict['signed_token_scoped_expired']
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)

    def test_memcache_set_invalid_uuid(self):
        invalid_uri = "%s/v2.0/tokens/invalid-token" % BASE_URI
        self.requests.get(invalid_uri, status_code=404)

        req = webob.Request.blank('/')
        token = 'invalid-token'
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)
        # Invalid tokens are cached as invalid; reading one raises.
        self.assertRaises(exc.InvalidToken,
                          self._get_cached_token, token)

    def _test_memcache_set_invalid_signed(self, hash_algorithms=None,
                                          exp_mode='md5'):
        req = webob.Request.blank('/')
        token = self.token_dict['signed_token_scoped_expired']
        req.headers['X-Auth-Token'] = token
        if hash_algorithms:
            self.conf['hash_algorithms'] = ','.join(hash_algorithms)
            self.set_middleware()
        self.middleware(req.environ, self.start_fake_response)
        # The expired token must be cached as invalid under the first
        # (preferred) hash algorithm, exp_mode.
        self.assertRaises(exc.InvalidToken,
                          self._get_cached_token, token, mode=exp_mode)

    def test_memcache_set_invalid_signed(self):
        self._test_memcache_set_invalid_signed()

    def test_memcache_set_invalid_signed_sha256_md5(self):
        hash_algorithms = ['sha256', 'md5']
        self._test_memcache_set_invalid_signed(hash_algorithms=hash_algorithms,
                                               exp_mode='sha256')

    def test_memcache_set_invalid_signed_sha256(self):
        hash_algorithms = ['sha256']
        self._test_memcache_set_invalid_signed(hash_algorithms=hash_algorithms,
                                               exp_mode='sha256')

    def test_memcache_set_expired(self, extra_conf={}, extra_environ={}):
        # NOTE(review): mutable default arguments; harmless here because
        # both dicts are only read, never mutated.
        token_cache_time = 10
        conf = {
            'token_cache_time': '%s' % token_cache_time,
        }
        conf.update(extra_conf)
        self.set_middleware(conf=conf)
        req = webob.Request.blank('/')
        token = self.token_dict['signed_token_scoped']
        req.headers['X-Auth-Token'] = token
        req.environ.update(extra_environ)

        now = datetime.datetime.utcnow()
        self.useFixture(TimeFixture(now))
        self.middleware(req.environ, self.start_fake_response)
        self.assertIsNotNone(self._get_cached_token(token))

        # Once token_cache_time elapses the cache entry must be gone.
        timeutils.advance_time_seconds(token_cache_time)
        self.assertIsNone(self._get_cached_token(token))

    def test_swift_memcache_set_expired(self):
        # Same as above, but using the swift.cache env-provided cache.
        extra_conf = {'cache': 'swift.cache'}
        extra_environ = {'swift.cache': memorycache.Client()}
        self.test_memcache_set_expired(extra_conf, extra_environ)

    def test_http_error_not_cached_token(self):
        """Tokens must not be cached as invalid on network errors.

        We use UUID tokens since they are the easiest one to reach
        get_http_connection.
        """
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = ERROR_TOKEN
        self.middleware._http_request_max_retries = 0
        self.middleware(req.environ, self.start_fake_response)
        self.assertIsNone(self._get_cached_token(ERROR_TOKEN))
        self.assert_valid_last_url(ERROR_TOKEN)

    def test_http_request_max_retries(self):
        times_retry = 10

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = ERROR_TOKEN

        conf = {'http_request_max_retries': '%s' % times_retry}
        self.set_middleware(conf=conf)

        with mock.patch('time.sleep') as mock_obj:
            self.middleware(req.environ, self.start_fake_response)

        # One sleep between each retry attempt.
        self.assertEqual(mock_obj.call_count, times_retry)

    def test_nocatalog(self):
        conf = {
            'include_service_catalog': 'False'
        }
        self.set_middleware(conf=conf)
        self.assert_valid_request_200(self.token_dict['uuid_token_default'],
                                      with_catalog=False)

    def assert_kerberos_bind(self, token, bind_level,
                             use_kerberos=True, success=True):
        """Run a bound-token request and check the outcome.

        :param bind_level: value for the enforce_token_bind option.
        :param use_kerberos: True for the fixture's kerberos principal,
            False for no REMOTE_USER, or a string naming a different
            principal.
        :param success: whether a 200 (else 401) is expected.
        """
        conf = {
            'enforce_token_bind': bind_level,
            'auth_version': self.auth_version,
        }
        self.set_middleware(conf=conf)

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token

        if use_kerberos:
            if use_kerberos is True:
                req.environ['REMOTE_USER'] = self.examples.KERBEROS_BIND
            else:
                req.environ['REMOTE_USER'] = use_kerberos

            req.environ['AUTH_TYPE'] = 'Negotiate'

        body = self.middleware(req.environ, self.start_fake_response)

        if success:
            self.assertEqual(self.response_status, 200)
            self.assertEqual(body, [FakeApp.SUCCESS])
            self.assertIn('keystone.token_info', req.environ)
            self.assert_valid_last_url(token)
        else:
            self.assertEqual(self.response_status, 401)
            self.assertEqual(self.response_headers['WWW-Authenticate'],
                             "Keystone uri='https://keystone.example.com:1234'"
                             )

    def test_uuid_bind_token_disabled_with_kerb_user(self):
        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                      bind_level='disabled',
                                      use_kerberos=use_kerberos,
                                      success=True)

    def test_uuid_bind_token_disabled_with_incorrect_ticket(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='kerberos',
                                  use_kerberos='ronald@MCDONALDS.COM',
                                  success=False)

    def test_uuid_bind_token_permissive_with_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='permissive',
                                  use_kerberos=True,
                                  success=True)

    def test_uuid_bind_token_permissive_without_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='permissive',
                                  use_kerberos=False,
                                  success=False)

    def test_uuid_bind_token_permissive_with_unknown_bind(self):
        token = self.token_dict['uuid_token_unknown_bind']

        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(token,
                                      bind_level='permissive',
                                      use_kerberos=use_kerberos,
                                      success=True)

    def test_uuid_bind_token_permissive_with_incorrect_ticket(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='kerberos',
                                  use_kerberos='ronald@MCDONALDS.COM',
                                  success=False)

    def test_uuid_bind_token_strict_with_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='strict',
                                  use_kerberos=True,
                                  success=True)

    def test_uuid_bind_token_strict_with_kerbout_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='strict',
                                  use_kerberos=False,
                                  success=False)

    def test_uuid_bind_token_strict_with_unknown_bind(self):
        token = self.token_dict['uuid_token_unknown_bind']

        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(token,
                                      bind_level='strict',
                                      use_kerberos=use_kerberos,
                                      success=False)

    def test_uuid_bind_token_required_with_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='required',
                                  use_kerberos=True,
                                  success=True)

    def test_uuid_bind_token_required_without_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='required',
                                  use_kerberos=False,
                                  success=False)

    def test_uuid_bind_token_required_with_unknown_bind(self):
        token = self.token_dict['uuid_token_unknown_bind']

        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(token,
                                      bind_level='required',
                                      use_kerberos=use_kerberos,
                                      success=False)

    def test_uuid_bind_token_required_without_bind(self):
        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(self.token_dict['uuid_token_default'],
                                      bind_level='required',
                                      use_kerberos=use_kerberos,
                                      success=False)

    def test_uuid_bind_token_named_kerberos_with_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='kerberos',
                                  use_kerberos=True,
                                  success=True)

    def test_uuid_bind_token_named_kerberos_without_kerb_user(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='kerberos',
                                  use_kerberos=False,
                                  success=False)

    def test_uuid_bind_token_named_kerberos_with_unknown_bind(self):
        token = self.token_dict['uuid_token_unknown_bind']

        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(token,
                                      bind_level='kerberos',
                                      use_kerberos=use_kerberos,
                                      success=False)

    def test_uuid_bind_token_named_kerberos_without_bind(self):
        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(self.token_dict['uuid_token_default'],
                                      bind_level='kerberos',
                                      use_kerberos=use_kerberos,
                                      success=False)

    def test_uuid_bind_token_named_kerberos_with_incorrect_ticket(self):
        self.assert_kerberos_bind(self.token_dict['uuid_token_bind'],
                                  bind_level='kerberos',
                                  use_kerberos='ronald@MCDONALDS.COM',
                                  success=False)

    def test_uuid_bind_token_with_unknown_named_FOO(self):
        token = self.token_dict['uuid_token_bind']

        for use_kerberos in [True, False]:
            self.assert_kerberos_bind(token,
                                      bind_level='FOO',
                                      use_kerberos=use_kerberos,
                                      success=False)

    def test_caching_token_on_verify(self):
        # When the token is cached it isn't cached again when it's verified.

        # The token cache has to be initialized with our cache instance.
        self.middleware._token_cache._env_cache_name = 'cache'
        cache = memorycache.Client()
        self.middleware._token_cache.initialize(env={'cache': cache})

        # Mock cache.set since then the test can verify call_count.
        orig_cache_set = cache.set
        cache.set = mock.Mock(side_effect=orig_cache_set)

        token = self.token_dict['signed_token_scoped']

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(200, self.response_status)

        self.assertThat(1, matchers.Equals(cache.set.call_count))

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(200, self.response_status)

        # Assert that the token wasn't cached again.
        self.assertThat(1, matchers.Equals(cache.set.call_count))

    def test_auth_plugin(self):

        for service_url in (self.examples.UNVERSIONED_SERVICE_URL,
                            self.examples.SERVICE_URL):
            self.requests.get(service_url,
                              json=VERSION_LIST_v3,
                              status_code=300)

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.token_dict['uuid_token_default']
        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(200, self.response_status)
        self.assertEqual([FakeApp.SUCCESS], body)

        # The middleware exposes an auth plugin for the validated token.
        token_auth = req.environ['keystone.token_auth']
        endpoint_filter = {'service_type': self.examples.SERVICE_TYPE,
                           'version': 3}

        url = token_auth.get_endpoint(session.Session(), **endpoint_filter)
        self.assertEqual('%s/v3' % BASE_URI, url)

        self.assertTrue(token_auth.has_user_token)
        self.assertFalse(token_auth.has_service_token)
        self.assertIsNone(token_auth.service)
+
+
+class V2CertDownloadMiddlewareTest(BaseAuthTokenMiddlewareTest,
+ testresources.ResourcedTestCase):
+
+ resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]
+
    def __init__(self, *args, **kwargs):
        super(V2CertDownloadMiddlewareTest, self).__init__(*args, **kwargs)
        # v2 certificate endpoints exercised by the download tests below
        self.auth_version = 'v2.0'
        self.fake_app = None
        self.ca_path = '/v2.0/certificates/ca'
        self.signing_path = '/v2.0/certificates/signing'
+
    def setUp(self):
        super(V2CertDownloadMiddlewareTest, self).setUp(
            auth_version=self.auth_version,
            fake_app=self.fake_app)
        # Fresh, empty signing directory per test; removed on cleanup.
        self.base_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.base_dir)
        self.cert_dir = os.path.join(self.base_dir, 'certs')
        os.makedirs(self.cert_dir, stat.S_IRWXU)
        conf = {
            'signing_dir': self.cert_dir,
            'auth_version': self.auth_version,
        }

        self.requests.register_uri('GET',
                                   BASE_URI,
                                   json=VERSION_LIST_v3,
                                   status_code=300)

        self.set_middleware(conf=conf)
+
+ # Usually we supply a signed_dir with pre-installed certificates,
+ # so invocation of /usr/bin/openssl succeeds. This time we give it
+ # an empty directory, so it fails.
+ def test_request_no_token_dummy(self):
+ cms._ensure_subprocess()
+
+ self.requests.get('%s%s' % (BASE_URI, self.ca_path),
+ status_code=404)
+ self.requests.get('%s%s' % (BASE_URI, self.signing_path),
+ status_code=404)
+ self.assertRaises(exceptions.CertificateConfigError,
+ self.middleware._verify_signed_token,
+ self.examples.SIGNED_TOKEN_SCOPED,
+ [self.examples.SIGNED_TOKEN_SCOPED_HASH])
+
+ def test_fetch_signing_cert(self):
+ data = 'FAKE CERT'
+ url = "%s%s" % (BASE_URI, self.signing_path)
+ self.requests.get(url, text=data)
+ self.middleware._fetch_signing_cert()
+
+ signing_cert_path = self.middleware._signing_directory.calc_path(
+ self.middleware._SIGNING_CERT_FILE_NAME)
+ with open(signing_cert_path, 'r') as f:
+ self.assertEqual(f.read(), data)
+
+ self.assertEqual(url, self.requests.last_request.url)
+
+ def test_fetch_signing_ca(self):
+ data = 'FAKE CA'
+ url = "%s%s" % (BASE_URI, self.ca_path)
+ self.requests.get(url, text=data)
+ self.middleware._fetch_ca_cert()
+
+ ca_file_path = self.middleware._signing_directory.calc_path(
+ self.middleware._SIGNING_CA_FILE_NAME)
+ with open(ca_file_path, 'r') as f:
+ self.assertEqual(f.read(), data)
+
+ self.assertEqual(url, self.requests.last_request.url)
+
+ def test_prefix_trailing_slash(self):
+ del self.conf['identity_uri']
+ self.conf['auth_protocol'] = 'https'
+ self.conf['auth_host'] = 'keystone.example.com'
+ self.conf['auth_port'] = '1234'
+ self.conf['auth_admin_prefix'] = '/newadmin/'
+
+ base_url = '%s/newadmin' % BASE_HOST
+ ca_url = "%s%s" % (base_url, self.ca_path)
+ signing_url = "%s%s" % (base_url, self.signing_path)
+
+ self.requests.get(base_url,
+ json=VERSION_LIST_v3,
+ status_code=300)
+ self.requests.get(ca_url, text='FAKECA')
+ self.requests.get(signing_url, text='FAKECERT')
+
+ self.set_middleware(conf=self.conf)
+
+ self.middleware._fetch_ca_cert()
+ self.assertEqual(ca_url, self.requests.last_request.url)
+
+ self.middleware._fetch_signing_cert()
+ self.assertEqual(signing_url, self.requests.last_request.url)
+
+ def test_without_prefix(self):
+ del self.conf['identity_uri']
+ self.conf['auth_protocol'] = 'https'
+ self.conf['auth_host'] = 'keystone.example.com'
+ self.conf['auth_port'] = '1234'
+ self.conf['auth_admin_prefix'] = ''
+
+ ca_url = "%s%s" % (BASE_HOST, self.ca_path)
+ signing_url = "%s%s" % (BASE_HOST, self.signing_path)
+
+ self.requests.get(BASE_HOST,
+ json=VERSION_LIST_v3,
+ status_code=300)
+ self.requests.get(ca_url, text='FAKECA')
+ self.requests.get(signing_url, text='FAKECERT')
+
+ self.set_middleware(conf=self.conf)
+
+ self.middleware._fetch_ca_cert()
+ self.assertEqual(ca_url, self.requests.last_request.url)
+
+ self.middleware._fetch_signing_cert()
+ self.assertEqual(signing_url, self.requests.last_request.url)
+
+
class V3CertDownloadMiddlewareTest(V2CertDownloadMiddlewareTest):
    """Re-run the cert-download tests against the v3 OS-SIMPLE-CERT API.

    Only the version-specific attributes differ from the v2 base class;
    every test method is inherited unchanged.
    """

    def __init__(self, *args, **kwargs):
        super(V3CertDownloadMiddlewareTest, self).__init__(*args, **kwargs)
        # Point the inherited tests at the v3 cert endpoints.
        self.ca_path = '/v3/OS-SIMPLE-CERT/ca'
        self.signing_path = '/v3/OS-SIMPLE-CERT/certificates'
        self.auth_version = 'v3.0'
        self.fake_app = v3FakeApp
+
+
def network_error_response(request, context):
    """requests-mock callback that simulates a network failure.

    Registered as the ``text`` callback for the ERROR_TOKEN URL so that
    validating that token raises a ConnectionError instead of returning
    an HTTP response.
    """
    raise exceptions.ConnectionError("Network connection error.")
+
+
class v2AuthTokenMiddlewareTest(BaseAuthTokenMiddlewareTest,
                                CommonAuthTokenMiddlewareTest,
                                testresources.ResourcedTestCase):
    """v2 token specific tests.

    There are some differences between how the auth-token middleware handles
    v2 and v3 tokens over and above the token formats, namely:

    - A v3 keystone server will auto scope a token to a user's default project
      if no scope is specified. A v2 server assumes that the auth-token
      middleware will do that.
    - A v2 keystone server may issue a token without a catalog, even with a
      tenant

    The tests below were originally part of the generic AuthTokenMiddlewareTest
    class, but now, since they really are v2 specific, they are included here.

    """

    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]

    def setUp(self):
        super(v2AuthTokenMiddlewareTest, self).setUp()

        # Maps the generic names used by CommonAuthTokenMiddlewareTest to
        # the concrete v2 example tokens.
        self.token_dict = {
            'uuid_token_default': self.examples.UUID_TOKEN_DEFAULT,
            'uuid_token_unscoped': self.examples.UUID_TOKEN_UNSCOPED,
            'uuid_token_bind': self.examples.UUID_TOKEN_BIND,
            'uuid_token_unknown_bind': self.examples.UUID_TOKEN_UNKNOWN_BIND,
            'signed_token_scoped': self.examples.SIGNED_TOKEN_SCOPED,
            'signed_token_scoped_pkiz': self.examples.SIGNED_TOKEN_SCOPED_PKIZ,
            'signed_token_scoped_hash': self.examples.SIGNED_TOKEN_SCOPED_HASH,
            'signed_token_scoped_hash_sha256':
            self.examples.SIGNED_TOKEN_SCOPED_HASH_SHA256,
            'signed_token_scoped_expired':
            self.examples.SIGNED_TOKEN_SCOPED_EXPIRED,
            'revoked_token': self.examples.REVOKED_TOKEN,
            'revoked_token_pkiz': self.examples.REVOKED_TOKEN_PKIZ,
            'revoked_token_pkiz_hash':
            self.examples.REVOKED_TOKEN_PKIZ_HASH,
            'revoked_token_hash': self.examples.REVOKED_TOKEN_HASH,
            'revoked_token_hash_sha256':
            self.examples.REVOKED_TOKEN_HASH_SHA256,
        }

        # Version discovery returns the v2 version list.
        self.requests.get(BASE_URI,
                          json=VERSION_LIST_v2,
                          status_code=300)

        # Admin token acquisition and the signed revocation list.
        self.requests.post('%s/v2.0/tokens' % BASE_URI,
                           text=FAKE_ADMIN_TOKEN)

        self.requests.get('%s/v2.0/tokens/revoked' % BASE_URI,
                          text=self.examples.SIGNED_REVOCATION_LIST)

        # Each known example token validates via GET /v2.0/tokens/<id>.
        for token in (self.examples.UUID_TOKEN_DEFAULT,
                      self.examples.UUID_TOKEN_UNSCOPED,
                      self.examples.UUID_TOKEN_BIND,
                      self.examples.UUID_TOKEN_UNKNOWN_BIND,
                      self.examples.UUID_TOKEN_NO_SERVICE_CATALOG,
                      self.examples.SIGNED_TOKEN_SCOPED_KEY,
                      self.examples.SIGNED_TOKEN_SCOPED_PKIZ_KEY,):
            url = "%s/v2.0/tokens/%s" % (BASE_URI, token)
            text = self.examples.JSON_TOKEN_RESPONSES[token]
            self.requests.get(url, text=text)

        # ERROR_TOKEN simulates a network failure during validation.
        url = '%s/v2.0/tokens/%s' % (BASE_URI, ERROR_TOKEN)
        self.requests.get(url, text=network_error_response)

        self.set_middleware()

    def assert_unscoped_default_tenant_auto_scopes(self, token):
        """Unscoped v2 requests with a default tenant should "auto-scope."

        The implied scope is the user's tenant ID.

        """
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token
        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        self.assertEqual(body, [FakeApp.SUCCESS])
        self.assertIn('keystone.token_info', req.environ)

    def assert_valid_last_url(self, token_id):
        # v2 validates tokens with GET /v2.0/tokens/<token_id>.
        self.assertLastPath("/v2.0/tokens/%s" % token_id)

    def test_default_tenant_uuid_token(self):
        self.assert_unscoped_default_tenant_auto_scopes(
            self.examples.UUID_TOKEN_DEFAULT)

    def test_default_tenant_signed_token(self):
        self.assert_unscoped_default_tenant_auto_scopes(
            self.examples.SIGNED_TOKEN_SCOPED)

    def assert_unscoped_token_receives_401(self, token):
        """Unscoped requests with no default tenant ID should be rejected."""
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = token
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 401)
        self.assertEqual(self.response_headers['WWW-Authenticate'],
                         "Keystone uri='https://keystone.example.com:1234'")

    def test_unscoped_uuid_token_receives_401(self):
        self.assert_unscoped_token_receives_401(
            self.examples.UUID_TOKEN_UNSCOPED)

    def test_unscoped_pki_token_receives_401(self):
        self.assert_unscoped_token_receives_401(
            self.examples.SIGNED_TOKEN_UNSCOPED)

    def test_request_prevent_service_catalog_injection(self):
        # A client-supplied X-Service-Catalog header must be stripped when
        # the validated token carries no catalog.
        req = webob.Request.blank('/')
        req.headers['X-Service-Catalog'] = '[]'
        req.headers['X-Auth-Token'] = (
            self.examples.UUID_TOKEN_NO_SERVICE_CATALOG)
        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        self.assertFalse(req.headers.get('X-Service-Catalog'))
        self.assertEqual(body, [FakeApp.SUCCESS])

    def test_user_plugin_token_properties(self):
        # Using the same token for both user and service: both sides of the
        # environ auth plugin should expose the same v2 token attributes.
        req = webob.Request.blank('/')
        req.headers['X-Service-Catalog'] = '[]'
        token = self.examples.UUID_TOKEN_DEFAULT
        token_data = self.examples.TOKEN_RESPONSES[token]
        req.headers['X-Auth-Token'] = token
        req.headers['X-Service-Token'] = token

        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        self.assertEqual([FakeApp.SUCCESS], body)

        token_auth = req.environ['keystone.token_auth']

        self.assertTrue(token_auth.has_user_token)
        self.assertTrue(token_auth.has_service_token)

        for t in [token_auth.user, token_auth.service]:
            self.assertEqual(token_data.user_id, t.user_id)
            self.assertEqual(token_data.tenant_id, t.project_id)

            self.assertThat(t.role_names, matchers.HasLength(2))
            self.assertIn('role1', t.role_names)
            self.assertIn('role2', t.role_names)

            # v2 tokens carry no trust or domain information.
            self.assertIsNone(t.trust_id)
            self.assertIsNone(t.user_domain_id)
            self.assertIsNone(t.project_domain_id)
+
+
class CrossVersionAuthTokenMiddlewareTest(BaseAuthTokenMiddlewareTest,
                                          testresources.ResourcedTestCase):

    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]

    def test_valid_uuid_request_forced_to_2_0(self):
        """Test forcing auth_token to use lower api version.

        By installing the v3 http handler, auth_token will get
        a version list that looks like a v3 server - from which it
        would normally choose v3.0 as the auth version. However, here
        we specify v2.0 in the configuration - which should force
        auth_token to use that version instead.

        """
        conf = {
            'auth_version': 'v2.0'
        }

        # Discovery advertises v3, but the conf above pins v2.0.
        self.requests.get(BASE_URI,
                          json=VERSION_LIST_v3,
                          status_code=300)

        self.requests.post('%s/v2.0/tokens' % BASE_URI,
                           text=FAKE_ADMIN_TOKEN)

        # Only the v2 validation URL is registered: if the middleware tried
        # v3 the request would fail.
        token = self.examples.UUID_TOKEN_DEFAULT
        url = "%s/v2.0/tokens/%s" % (BASE_URI, token)
        text = self.examples.JSON_TOKEN_RESPONSES[token]
        self.requests.get(url, text=text)

        self.set_middleware(conf=conf)

        # This test will only work if auth_token has chosen to use the
        # lower, v2, api version
        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = self.examples.UUID_TOKEN_DEFAULT
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        self.assertEqual(url, self.requests.last_request.url)
+
+
class v3AuthTokenMiddlewareTest(BaseAuthTokenMiddlewareTest,
                                CommonAuthTokenMiddlewareTest,
                                testresources.ResourcedTestCase):
    """Test auth_token middleware with v3 tokens.

    Re-execute the AuthTokenMiddlewareTest class tests, but with the
    auth_token middleware configured to expect v3 tokens back from
    a keystone server.

    This is done by configuring the AuthTokenMiddlewareTest class via
    its Setup(), passing in v3 style data that will then be used by
    the tests themselves. This approach has been used to ensure we
    really are running the same tests for both v2 and v3 tokens.

    There are a few additional specific tests for v3 only:

    - We allow an unscoped token to be validated (as unscoped), where
      as for v2 tokens, the auth_token middleware is expected to try and
      auto-scope it (and fail if there is no default tenant)
    - Domain scoped tokens

    Since we don't specify an auth version for auth_token to use, by
    definition we are therefore implicitly testing that it will use
    the highest available auth version, i.e. v3.0

    """

    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]

    def setUp(self):
        super(v3AuthTokenMiddlewareTest, self).setUp(
            auth_version='v3.0',
            fake_app=v3FakeApp)

        # Maps the generic names used by CommonAuthTokenMiddlewareTest to
        # the concrete v3 example tokens.
        self.token_dict = {
            'uuid_token_default': self.examples.v3_UUID_TOKEN_DEFAULT,
            'uuid_token_unscoped': self.examples.v3_UUID_TOKEN_UNSCOPED,
            'uuid_token_bind': self.examples.v3_UUID_TOKEN_BIND,
            'uuid_token_unknown_bind':
            self.examples.v3_UUID_TOKEN_UNKNOWN_BIND,
            'signed_token_scoped': self.examples.SIGNED_v3_TOKEN_SCOPED,
            'signed_token_scoped_pkiz':
            self.examples.SIGNED_v3_TOKEN_SCOPED_PKIZ,
            'signed_token_scoped_hash':
            self.examples.SIGNED_v3_TOKEN_SCOPED_HASH,
            'signed_token_scoped_hash_sha256':
            self.examples.SIGNED_v3_TOKEN_SCOPED_HASH_SHA256,
            'signed_token_scoped_expired':
            self.examples.SIGNED_TOKEN_SCOPED_EXPIRED,
            'revoked_token': self.examples.REVOKED_v3_TOKEN,
            'revoked_token_pkiz': self.examples.REVOKED_v3_TOKEN_PKIZ,
            'revoked_token_hash': self.examples.REVOKED_v3_TOKEN_HASH,
            'revoked_token_hash_sha256':
            self.examples.REVOKED_v3_TOKEN_HASH_SHA256,
            'revoked_token_pkiz_hash':
            self.examples.REVOKED_v3_PKIZ_TOKEN_HASH,
        }

        self.requests.get(BASE_URI,
                          json=VERSION_LIST_v3,
                          status_code=300)

        # TODO(jamielennox): auth_token middleware uses a v2 admin token
        # regardless of the auth_version that is set.
        self.requests.post('%s/v2.0/tokens' % BASE_URI,
                           text=FAKE_ADMIN_TOKEN)

        # TODO(jamielennox): there is no v3 revocation url yet, it uses v2
        self.requests.get('%s/v2.0/tokens/revoked' % BASE_URI,
                          text=self.examples.SIGNED_REVOCATION_LIST)

        # All v3 validations go through one URL; token_response dispatches
        # on the X-Subject-Token header.
        self.requests.get('%s/v3/auth/tokens' % BASE_URI,
                          text=self.token_response)

        self.set_middleware()

    def token_response(self, request, context):
        """requests-mock callback serving v3 token validation responses."""
        auth_id = request.headers.get('X-Auth-Token')
        token_id = request.headers.get('X-Subject-Token')
        self.assertEqual(auth_id, FAKE_ADMIN_TOKEN_ID)

        if token_id == ERROR_TOKEN:
            raise exceptions.ConnectionError("Network connection error.")

        try:
            response = self.examples.JSON_TOKEN_RESPONSES[token_id]
        except KeyError:
            # Unknown token: emulate keystone's 404.
            response = ""
            context.status_code = 404

        return response

    def assert_valid_last_url(self, token_id):
        # All v3 validations go to the same path regardless of token id.
        self.assertLastPath('/v3/auth/tokens')

    def test_valid_unscoped_uuid_request(self):
        # Remove items that won't be in an unscoped token
        delta_expected_env = {
            'HTTP_X_PROJECT_ID': None,
            'HTTP_X_PROJECT_NAME': None,
            'HTTP_X_PROJECT_DOMAIN_ID': None,
            'HTTP_X_PROJECT_DOMAIN_NAME': None,
            'HTTP_X_TENANT_ID': None,
            'HTTP_X_TENANT_NAME': None,
            'HTTP_X_ROLES': '',
            'HTTP_X_TENANT': None,
            'HTTP_X_ROLE': '',
        }
        self.set_middleware(expected_env=delta_expected_env)
        self.assert_valid_request_200(self.examples.v3_UUID_TOKEN_UNSCOPED,
                                      with_catalog=False)
        self.assertLastPath('/v3/auth/tokens')

    def test_domain_scoped_uuid_request(self):
        # Modify items compared to default token for a domain scope
        delta_expected_env = {
            'HTTP_X_DOMAIN_ID': 'domain_id1',
            'HTTP_X_DOMAIN_NAME': 'domain_name1',
            'HTTP_X_PROJECT_ID': None,
            'HTTP_X_PROJECT_NAME': None,
            'HTTP_X_PROJECT_DOMAIN_ID': None,
            'HTTP_X_PROJECT_DOMAIN_NAME': None,
            'HTTP_X_TENANT_ID': None,
            'HTTP_X_TENANT_NAME': None,
            'HTTP_X_TENANT': None
        }
        self.set_middleware(expected_env=delta_expected_env)
        self.assert_valid_request_200(
            self.examples.v3_UUID_TOKEN_DOMAIN_SCOPED)
        self.assertLastPath('/v3/auth/tokens')

    def test_gives_v2_catalog(self):
        self.set_middleware()
        req = self.assert_valid_request_200(
            self.examples.SIGNED_v3_TOKEN_SCOPED)

        catalog = jsonutils.loads(req.headers['X-Service-Catalog'])

        for service in catalog:
            for endpoint in service['endpoints']:
                # no point checking everything, just that it's in v2 format
                # (fixed: original checked 'adminURL' twice and never
                # checked 'internalURL')
                self.assertIn('adminURL', endpoint)
                self.assertIn('publicURL', endpoint)
                self.assertIn('internalURL', endpoint)

    def test_fallback_to_online_validation_with_signing_error(self):
        self.requests.register_uri(
            'GET',
            '%s/v3/OS-SIMPLE-CERT/certificates' % BASE_URI,
            status_code=404)
        self.assert_valid_request_200(self.token_dict['signed_token_scoped'])
        self.assert_valid_request_200(
            self.token_dict['signed_token_scoped_pkiz'])

    def test_fallback_to_online_validation_with_ca_error(self):
        self.requests.register_uri('GET',
                                   '%s/v3/OS-SIMPLE-CERT/ca' % BASE_URI,
                                   status_code=404)
        self.assert_valid_request_200(self.token_dict['signed_token_scoped'])
        self.assert_valid_request_200(
            self.token_dict['signed_token_scoped_pkiz'])

    def test_fallback_to_online_validation_with_revocation_list_error(self):
        self.requests.register_uri('GET',
                                   '%s/v2.0/tokens/revoked' % BASE_URI,
                                   status_code=404)
        self.assert_valid_request_200(self.token_dict['signed_token_scoped'])
        self.assert_valid_request_200(
            self.token_dict['signed_token_scoped_pkiz'])

    def test_user_plugin_token_properties(self):
        # Using the same token for both user and service: both sides of the
        # environ auth plugin should expose the same v3 token attributes.
        req = webob.Request.blank('/')
        req.headers['X-Service-Catalog'] = '[]'
        token = self.examples.v3_UUID_TOKEN_DEFAULT
        token_data = self.examples.TOKEN_RESPONSES[token]
        req.headers['X-Auth-Token'] = token
        req.headers['X-Service-Token'] = token

        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        self.assertEqual([FakeApp.SUCCESS], body)

        token_auth = req.environ['keystone.token_auth']

        self.assertTrue(token_auth.has_user_token)
        self.assertTrue(token_auth.has_service_token)

        for t in [token_auth.user, token_auth.service]:
            self.assertEqual(token_data.user_id, t.user_id)
            self.assertEqual(token_data.project_id, t.project_id)
            self.assertEqual(token_data.user_domain_id, t.user_domain_id)
            self.assertEqual(token_data.project_domain_id, t.project_domain_id)

            self.assertThat(t.role_names, matchers.HasLength(2))
            self.assertIn('role1', t.role_names)
            self.assertIn('role2', t.role_names)

            self.assertIsNone(t.trust_id)
+
+
class TokenExpirationTest(BaseAuthTokenMiddlewareTest):
    """Tests for token expiry extraction, checking, and cache expiry."""

    def setUp(self):
        super(TokenExpirationTest, self).setUp()
        self.now = timeutils.utcnow()
        self.delta = datetime.timedelta(hours=1)
        self.one_hour_ago = timeutils.isotime(self.now - self.delta,
                                              subsecond=True)
        # NOTE(review): despite the name, this is one hour *later* than
        # now (self.now + self.delta) — i.e. a not-yet-expired timestamp.
        # TODO: consider renaming to one_hour_later.
        self.one_hour_earlier = timeutils.isotime(self.now + self.delta,
                                                  subsecond=True)

    def create_v2_token_fixture(self, expires=None):
        """Build a minimal v2 token body; defaults to a valid expiry."""
        v2_fixture = {
            'access': {
                'token': {
                    'id': 'blah',
                    'expires': expires or self.one_hour_earlier,
                    'tenant': {
                        'id': 'tenant_id1',
                        'name': 'tenant_name1',
                    },
                },
                'user': {
                    'id': 'user_id1',
                    'name': 'user_name1',
                    'roles': [
                        {'name': 'role1'},
                        {'name': 'role2'},
                    ],
                },
                'serviceCatalog': {}
            },
        }

        return v2_fixture

    def create_v3_token_fixture(self, expires=None):
        """Build a minimal v3 token body; defaults to a valid expiry."""

        v3_fixture = {
            'token': {
                'expires_at': expires or self.one_hour_earlier,
                'user': {
                    'id': 'user_id1',
                    'name': 'user_name1',
                    'domain': {
                        'id': 'domain_id1',
                        'name': 'domain_name1'
                    }
                },
                'project': {
                    'id': 'tenant_id1',
                    'name': 'tenant_name1',
                    'domain': {
                        'id': 'domain_id1',
                        'name': 'domain_name1'
                    }
                },
                'roles': [
                    {'name': 'role1', 'id': 'Role1'},
                    {'name': 'role2', 'id': 'Role2'},
                ],
                'catalog': {}
            }
        }

        return v3_fixture

    def test_no_data(self):
        data = {}
        self.assertRaises(exc.InvalidToken,
                          auth_token._get_token_expiration,
                          data)

    def test_bad_data(self):
        data = {'my_happy_token_dict': 'woo'}
        self.assertRaises(exc.InvalidToken,
                          auth_token._get_token_expiration,
                          data)

    def test_v2_token_get_token_expiration_return_isotime(self):
        data = self.create_v2_token_fixture()
        actual_expires = auth_token._get_token_expiration(data)
        self.assertEqual(self.one_hour_earlier, actual_expires)

    def test_v2_token_not_expired(self):
        data = self.create_v2_token_fixture()
        expected_expires = data['access']['token']['expires']
        actual_expires = auth_token._get_token_expiration(data)
        self.assertEqual(actual_expires, expected_expires)

    def test_v2_token_expired(self):
        data = self.create_v2_token_fixture(expires=self.one_hour_ago)
        expires = auth_token._get_token_expiration(data)
        self.assertRaises(exc.InvalidToken,
                          auth_token._confirm_token_not_expired,
                          expires)

    def test_v2_token_with_timezone_offset_not_expired(self):
        # Freeze "now" so the fixed expiry strings are deterministic.
        self.useFixture(TimeFixture('2000-01-01T00:01:10.000123Z'))
        data = self.create_v2_token_fixture(
            expires='2000-01-01T05:05:10.000123Z')
        expected_expires = '2000-01-01T05:05:10.000123Z'
        actual_expires = auth_token._get_token_expiration(data)
        self.assertEqual(actual_expires, expected_expires)

    def test_v2_token_with_timezone_offset_expired(self):
        self.useFixture(TimeFixture('2000-01-01T00:01:10.000123Z'))
        data = self.create_v2_token_fixture(
            expires='1999-12-31T19:05:10Z')
        expires = auth_token._get_token_expiration(data)
        self.assertRaises(exc.InvalidToken,
                          auth_token._confirm_token_not_expired,
                          expires)

    def test_v3_token_get_token_expiration_return_isotime(self):
        data = self.create_v3_token_fixture()
        actual_expires = auth_token._get_token_expiration(data)
        self.assertEqual(self.one_hour_earlier, actual_expires)

    def test_v3_token_not_expired(self):
        data = self.create_v3_token_fixture()
        expected_expires = data['token']['expires_at']
        actual_expires = auth_token._get_token_expiration(data)
        self.assertEqual(actual_expires, expected_expires)

    def test_v3_token_expired(self):
        data = self.create_v3_token_fixture(expires=self.one_hour_ago)
        expires = auth_token._get_token_expiration(data)
        self.assertRaises(exc.InvalidToken,
                          auth_token._confirm_token_not_expired,
                          expires)

    def test_v3_token_with_timezone_offset_not_expired(self):
        self.useFixture(TimeFixture('2000-01-01T00:01:10.000123Z'))
        data = self.create_v3_token_fixture(
            expires='2000-01-01T05:05:10.000123Z')
        expected_expires = '2000-01-01T05:05:10.000123Z'

        actual_expires = auth_token._get_token_expiration(data)
        self.assertEqual(actual_expires, expected_expires)

    def test_v3_token_with_timezone_offset_expired(self):
        self.useFixture(TimeFixture('2000-01-01T00:01:10.000123Z'))
        data = self.create_v3_token_fixture(
            expires='1999-12-31T19:05:10Z')
        expires = auth_token._get_token_expiration(data)
        self.assertRaises(exc.InvalidToken,
                          auth_token._confirm_token_not_expired,
                          expires)

    def test_cached_token_not_expired(self):
        token = 'mytoken'
        data = 'this_data'
        self.set_middleware()
        self.middleware._token_cache.initialize({})
        some_time_later = timeutils.strtime(at=(self.now + self.delta))
        expires = some_time_later
        self.middleware._token_cache.store(token, data, expires)
        self.assertEqual(self.middleware._token_cache._cache_get(token), data)

    def test_cached_token_not_expired_with_old_style_nix_timestamp(self):
        """Ensure we cannot retrieve a token from the cache.

        Getting a token from the cache should return None when the token data
        in the cache stores the expires time as a \*nix style timestamp.

        """
        token = 'mytoken'
        data = 'this_data'
        self.set_middleware()
        token_cache = self.middleware._token_cache
        token_cache.initialize({})
        some_time_later = self.now + self.delta
        # Store a unix timestamp in the cache.
        expires = calendar.timegm(some_time_later.timetuple())
        token_cache.store(token, data, expires)
        self.assertIsNone(token_cache._cache_get(token))

    def test_cached_token_expired(self):
        token = 'mytoken'
        data = 'this_data'
        self.set_middleware()
        self.middleware._token_cache.initialize({})
        some_time_earlier = timeutils.strtime(at=(self.now - self.delta))
        expires = some_time_earlier
        self.middleware._token_cache.store(token, data, expires)
        self.assertThat(lambda: self.middleware._token_cache._cache_get(token),
                        matchers.raises(exc.InvalidToken))

    def test_cached_token_with_timezone_offset_not_expired(self):
        token = 'mytoken'
        data = 'this_data'
        self.set_middleware()
        self.middleware._token_cache.initialize({})
        # Express the same instant with an explicit -02:00 offset.
        timezone_offset = datetime.timedelta(hours=2)
        some_time_later = self.now - timezone_offset + self.delta
        expires = timeutils.strtime(some_time_later) + '-02:00'
        self.middleware._token_cache.store(token, data, expires)
        self.assertEqual(self.middleware._token_cache._cache_get(token), data)

    def test_cached_token_with_timezone_offset_expired(self):
        token = 'mytoken'
        data = 'this_data'
        self.set_middleware()
        self.middleware._token_cache.initialize({})
        timezone_offset = datetime.timedelta(hours=2)
        some_time_earlier = self.now - timezone_offset - self.delta
        expires = timeutils.strtime(some_time_earlier) + '-02:00'
        self.middleware._token_cache.store(token, data, expires)
        self.assertThat(lambda: self.middleware._token_cache._cache_get(token),
                        matchers.raises(exc.InvalidToken))
+
+
class CatalogConversionTests(BaseAuthTokenMiddlewareTest):
    """Tests for auth_token._v3_to_v2_catalog (v3 -> v2 catalog format)."""

    PUBLIC_URL = 'http://server:5000/v2.0'
    ADMIN_URL = 'http://admin:35357/v2.0'
    INTERNAL_URL = 'http://internal:5000/v2.0'

    REGION_ONE = 'RegionOne'
    REGION_TWO = 'RegionTwo'
    REGION_THREE = 'RegionThree'

    def test_basic_convert(self):
        # One service with all three interfaces in one region collapses to
        # a single v2 endpoint dict.
        token = fixture.V3Token()
        s = token.add_service(type='identity')
        s.add_standard_endpoints(public=self.PUBLIC_URL,
                                 admin=self.ADMIN_URL,
                                 internal=self.INTERNAL_URL,
                                 region=self.REGION_ONE)

        auth_ref = access.AccessInfo.factory(body=token)
        catalog_data = auth_ref.service_catalog.get_data()
        catalog = auth_token._v3_to_v2_catalog(catalog_data)

        self.assertEqual(1, len(catalog))
        service = catalog[0]
        self.assertEqual(1, len(service['endpoints']))
        endpoints = service['endpoints'][0]

        self.assertEqual('identity', service['type'])
        self.assertEqual(4, len(endpoints))
        self.assertEqual(self.PUBLIC_URL, endpoints['publicURL'])
        self.assertEqual(self.ADMIN_URL, endpoints['adminURL'])
        self.assertEqual(self.INTERNAL_URL, endpoints['internalURL'])
        self.assertEqual(self.REGION_ONE, endpoints['region'])

    def test_multi_region(self):
        token = fixture.V3Token()
        s = token.add_service(type='identity')

        s.add_endpoint('internal', self.INTERNAL_URL, region=self.REGION_ONE)
        s.add_endpoint('public', self.PUBLIC_URL, region=self.REGION_TWO)
        s.add_endpoint('admin', self.ADMIN_URL, region=self.REGION_THREE)

        auth_ref = access.AccessInfo.factory(body=token)
        catalog_data = auth_ref.service_catalog.get_data()
        catalog = auth_token._v3_to_v2_catalog(catalog_data)

        self.assertEqual(1, len(catalog))
        service = catalog[0]

        # the 3 regions will come through as 3 separate endpoints
        expected = [{'internalURL': self.INTERNAL_URL,
                     'region': self.REGION_ONE},
                    {'publicURL': self.PUBLIC_URL,
                     'region': self.REGION_TWO},
                    {'adminURL': self.ADMIN_URL,
                     'region': self.REGION_THREE}]

        self.assertEqual('identity', service['type'])
        self.assertEqual(3, len(service['endpoints']))
        # Fixed: the original asserted each expected item was a member of
        # `expected` itself (a tautology that never checked the converted
        # catalog); assert against the actual converted endpoints instead.
        for e in expected:
            self.assertIn(e, service['endpoints'])
+
+
class DelayedAuthTests(BaseAuthTokenMiddlewareTest):
    """Tests for delay_auth_decision: pass rejection down to the app."""

    def test_header_in_401(self):
        # With delayed auth, the downstream app decides the status; the
        # middleware still adds the WWW-Authenticate header on a 401.
        body = uuid.uuid4().hex
        auth_uri = 'http://local.test'
        conf = {'delay_auth_decision': 'True',
                'auth_version': 'v3.0',
                'auth_uri': auth_uri}

        self.fake_app = new_app('401 Unauthorized', body)
        self.set_middleware(conf=conf)

        req = webob.Request.blank('/')
        resp = self.middleware(req.environ, self.start_fake_response)

        self.assertEqual([six.b(body)], resp)

        self.assertEqual(401, self.response_status)
        self.assertEqual("Keystone uri='%s'" % auth_uri,
                         self.response_headers['WWW-Authenticate'])

    def test_delayed_auth_values(self):
        # delay_auth_decision accepts the usual boolean string spellings;
        # the default is off.
        fake_app = new_app('401 Unauthorized', uuid.uuid4().hex)
        middleware = auth_token.AuthProtocol(fake_app,
                                             {'auth_uri': 'http://local.test'})
        self.assertFalse(middleware._delay_auth_decision)

        for v in ('True', '1', 'on', 'yes'):
            conf = {'delay_auth_decision': v,
                    'auth_uri': 'http://local.test'}

            middleware = auth_token.AuthProtocol(fake_app, conf)
            self.assertTrue(middleware._delay_auth_decision)

        for v in ('False', '0', 'no'):
            conf = {'delay_auth_decision': v,
                    'auth_uri': 'http://local.test'}

            middleware = auth_token.AuthProtocol(fake_app, conf)
            self.assertFalse(middleware._delay_auth_decision)

    def test_auth_plugin_with_no_tokens(self):
        # No tokens at all: with delayed auth the request still reaches the
        # app, and the environ plugin reports no user or service token.
        body = uuid.uuid4().hex
        auth_uri = 'http://local.test'
        conf = {'delay_auth_decision': True, 'auth_uri': auth_uri}
        self.fake_app = new_app('200 OK', body)
        self.set_middleware(conf=conf)

        req = webob.Request.blank('/')
        resp = self.middleware(req.environ, self.start_fake_response)

        self.assertEqual([six.b(body)], resp)

        token_auth = req.environ['keystone.token_auth']

        self.assertFalse(token_auth.has_user_token)
        self.assertIsNone(token_auth.user)
        self.assertFalse(token_auth.has_service_token)
        self.assertIsNone(token_auth.service)
+
+
+class CommonCompositeAuthTests(object):
+ """Test Composite authentication.
+
+ Test the behaviour of adding a service-token.
+ """
+
    def test_composite_auth_ok(self):
        """Valid user + service tokens: 200, and both are logged."""
        req = webob.Request.blank('/')
        token = self.token_dict['uuid_token_default']
        service_token = self.token_dict['uuid_service_token_default']
        req.headers['X-Auth-Token'] = token
        req.headers['X-Service-Token'] = service_token
        # Capture debug output to verify the composite-auth log line.
        fake_logger = fixtures.FakeLogger(level=logging.DEBUG)
        self.middleware.logger = self.useFixture(fake_logger)
        body = self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(200, self.response_status)
        self.assertEqual([FakeApp.SUCCESS], body)
        expected_env = dict(EXPECTED_V2_DEFAULT_ENV_RESPONSE)
        expected_env.update(EXPECTED_V2_DEFAULT_SERVICE_ENV_RESPONSE)
        self.assertIn('Received request from user: '
                      'user_id %(HTTP_X_USER_ID)s, '
                      'project_id %(HTTP_X_TENANT_ID)s, '
                      'roles %(HTTP_X_ROLES)s '
                      'service: user_id %(HTTP_X_SERVICE_USER_ID)s, '
                      'project_id %(HTTP_X_SERVICE_PROJECT_ID)s, '
                      'roles %(HTTP_X_SERVICE_ROLES)s' % expected_env,
                      fake_logger.output)
+
+ def test_composite_auth_invalid_service_token(self):
+ req = webob.Request.blank('/')
+ token = self.token_dict['uuid_token_default']
+ service_token = 'invalid-service-token'
+ req.headers['X-Auth-Token'] = token
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(401, self.response_status)
+ self.assertEqual([b'Authentication required'], body)
+
    def test_composite_auth_no_service_token(self):
        """Without a service token, service headers cannot be injected."""
        self.purge_service_token_expected_env()
        req = webob.Request.blank('/')
        token = self.token_dict['uuid_token_default']
        req.headers['X-Auth-Token'] = token

        # Ensure injection of service headers is not possible
        for key, value in six.iteritems(self.service_token_expected_env):
            header_key = key[len('HTTP_'):].replace('_', '-')
            req.headers[header_key] = value
        # Check arbitrary headers not removed
        req.headers['X-Foo'] = 'Bar'
        body = self.middleware(req.environ, self.start_fake_response)
        for key in six.iterkeys(self.service_token_expected_env):
            header_key = key[len('HTTP_'):].replace('_', '-')
            self.assertFalse(req.headers.get(header_key))
        self.assertEqual('Bar', req.headers.get('X-Foo'))
        # 418 is the fake app's marker status for this scenario.
        self.assertEqual(418, self.response_status)
        self.assertEqual([FakeApp.FORBIDDEN], body)
+
+ def test_composite_auth_invalid_user_token(self):
+ req = webob.Request.blank('/')
+ token = 'invalid-token'
+ service_token = self.token_dict['uuid_service_token_default']
+ req.headers['X-Auth-Token'] = token
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(401, self.response_status)
+ self.assertEqual([b'Authentication required'], body)
+
+ def test_composite_auth_no_user_token(self):
+ req = webob.Request.blank('/')
+ service_token = self.token_dict['uuid_service_token_default']
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(401, self.response_status)
+ self.assertEqual([b'Authentication required'], body)
+
+ def test_composite_auth_delay_ok(self):
+ self.middleware._delay_auth_decision = True
+ req = webob.Request.blank('/')
+ token = self.token_dict['uuid_token_default']
+ service_token = self.token_dict['uuid_service_token_default']
+ req.headers['X-Auth-Token'] = token
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(200, self.response_status)
+ self.assertEqual([FakeApp.SUCCESS], body)
+
    def test_composite_auth_delay_invalid_service_token(self):
        """Delayed auth marks a bad service token Invalid; the app decides."""
        self.middleware._delay_auth_decision = True
        self.purge_service_token_expected_env()
        expected_env = {
            'HTTP_X_SERVICE_IDENTITY_STATUS': 'Invalid',
        }
        self.update_expected_env(expected_env)

        req = webob.Request.blank('/')
        token = self.token_dict['uuid_token_default']
        service_token = 'invalid-service-token'
        req.headers['X-Auth-Token'] = token
        req.headers['X-Service-Token'] = service_token
        body = self.middleware(req.environ, self.start_fake_response)
        # 420 is the fake app's marker status for this scenario.
        self.assertEqual(420, self.response_status)
        self.assertEqual([FakeApp.FORBIDDEN], body)
+
+ def test_composite_auth_delay_invalid_service_and_user_tokens(self):
+ self.middleware._delay_auth_decision = True
+ self.purge_service_token_expected_env()
+ self.purge_token_expected_env()
+ expected_env = {
+ 'HTTP_X_IDENTITY_STATUS': 'Invalid',
+ 'HTTP_X_SERVICE_IDENTITY_STATUS': 'Invalid',
+ }
+ self.update_expected_env(expected_env)
+
+ req = webob.Request.blank('/')
+ token = 'invalid-user-token'
+ service_token = 'invalid-service-token'
+ req.headers['X-Auth-Token'] = token
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(419, self.response_status)
+ self.assertEqual([FakeApp.FORBIDDEN], body)
+
+ def test_composite_auth_delay_no_service_token(self):
+ self.middleware._delay_auth_decision = True
+ self.purge_service_token_expected_env()
+
+ req = webob.Request.blank('/')
+ token = self.token_dict['uuid_token_default']
+ req.headers['X-Auth-Token'] = token
+
+ # Ensure injection of service headers is not possible
+ for key, value in six.iteritems(self.service_token_expected_env):
+ header_key = key[len('HTTP_'):].replace('_', '-')
+ req.headers[header_key] = value
+ # Check arbitrary headers not removed
+ req.headers['X-Foo'] = 'Bar'
+ body = self.middleware(req.environ, self.start_fake_response)
+ for key in six.iterkeys(self.service_token_expected_env):
+ header_key = key[len('HTTP_'):].replace('_', '-')
+ self.assertFalse(req.headers.get(header_key))
+ self.assertEqual('Bar', req.headers.get('X-Foo'))
+ self.assertEqual(418, self.response_status)
+ self.assertEqual([FakeApp.FORBIDDEN], body)
+
+ def test_composite_auth_delay_invalid_user_token(self):
+ self.middleware._delay_auth_decision = True
+ self.purge_token_expected_env()
+ expected_env = {
+ 'HTTP_X_IDENTITY_STATUS': 'Invalid',
+ }
+ self.update_expected_env(expected_env)
+
+ req = webob.Request.blank('/')
+ token = 'invalid-token'
+ service_token = self.token_dict['uuid_service_token_default']
+ req.headers['X-Auth-Token'] = token
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(403, self.response_status)
+ self.assertEqual([FakeApp.FORBIDDEN], body)
+
+ def test_composite_auth_delay_no_user_token(self):
+ self.middleware._delay_auth_decision = True
+ self.purge_token_expected_env()
+ expected_env = {
+ 'HTTP_X_IDENTITY_STATUS': 'Invalid',
+ }
+ self.update_expected_env(expected_env)
+
+ req = webob.Request.blank('/')
+ service_token = self.token_dict['uuid_service_token_default']
+ req.headers['X-Service-Token'] = service_token
+ body = self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(403, self.response_status)
+ self.assertEqual([FakeApp.FORBIDDEN], body)
+
+
class v2CompositeAuthTests(BaseAuthTokenMiddlewareTest,
                           CommonCompositeAuthTests,
                           testresources.ResourcedTestCase):
    """Test auth_token middleware with v2 token based composite auth.

    Execute the Composite auth class tests, but with the
    auth_token middleware configured to expect v2 tokens back from
    a keystone server.
    """

    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]

    def setUp(self):
        super(v2CompositeAuthTests, self).setUp(
            expected_env=EXPECTED_V2_DEFAULT_SERVICE_ENV_RESPONSE,
            fake_app=CompositeFakeApp)

        # Tokens the CommonCompositeAuthTests methods look up by name.
        uuid_token_default = self.examples.UUID_TOKEN_DEFAULT
        uuid_service_token_default = self.examples.UUID_SERVICE_TOKEN_DEFAULT
        self.token_dict = {
            'uuid_token_default': uuid_token_default,
            'uuid_service_token_default': uuid_service_token_default,
        }

        # Version discovery document (300 Multiple Choices).
        self.requests.get(BASE_URI,
                          json=VERSION_LIST_v2,
                          status_code=300)

        # Admin token the middleware obtains to talk to keystone.
        self.requests.post('%s/v2.0/tokens' % BASE_URI,
                           text=FAKE_ADMIN_TOKEN)

        # Canned signed revocation list.
        self.requests.get('%s/v2.0/tokens/revoked' % BASE_URI,
                          text=self.examples.SIGNED_REVOCATION_LIST,
                          status_code=200)

        # Validation responses for both the user and service tokens.
        for token in (self.examples.UUID_TOKEN_DEFAULT,
                      self.examples.UUID_SERVICE_TOKEN_DEFAULT,):
            self.requests.get('%s/v2.0/tokens/%s' % (BASE_URI, token),
                              text=self.examples.JSON_TOKEN_RESPONSES[token])

        # The literal 'invalid-*' tokens used by the tests return 404.
        for invalid_uri in ("%s/v2.0/tokens/invalid-token" % BASE_URI,
                            "%s/v2.0/tokens/invalid-service-token" % BASE_URI):
            self.requests.get(invalid_uri, text='', status_code=404)

        self.token_expected_env = dict(EXPECTED_V2_DEFAULT_ENV_RESPONSE)
        self.service_token_expected_env = dict(
            EXPECTED_V2_DEFAULT_SERVICE_ENV_RESPONSE)
        self.set_middleware()
+
+
class v3CompositeAuthTests(BaseAuthTokenMiddlewareTest,
                           CommonCompositeAuthTests,
                           testresources.ResourcedTestCase):
    """Test auth_token middleware with v3 token based composite auth.

    Execute the Composite auth class tests, but with the
    auth_token middleware configured to expect v3 tokens back from
    a keystone server.
    """

    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]

    def setUp(self):
        super(v3CompositeAuthTests, self).setUp(
            auth_version='v3.0',
            fake_app=v3CompositeFakeApp)

        # Tokens the CommonCompositeAuthTests methods look up by name.
        uuid_token_default = self.examples.v3_UUID_TOKEN_DEFAULT
        uuid_serv_token_default = self.examples.v3_UUID_SERVICE_TOKEN_DEFAULT
        self.token_dict = {
            'uuid_token_default': uuid_token_default,
            'uuid_service_token_default': uuid_serv_token_default,
        }

        # Version discovery document (300 Multiple Choices).
        self.requests.get(BASE_URI, json=VERSION_LIST_v3, status_code=300)

        # TODO(jamielennox): auth_token middleware uses a v2 admin token
        # regardless of the auth_version that is set.
        self.requests.post('%s/v2.0/tokens' % BASE_URI,
                           text=FAKE_ADMIN_TOKEN)

        # TODO(jamielennox): there is no v3 revocation url yet, it uses v2
        self.requests.get('%s/v2.0/tokens/revoked' % BASE_URI,
                          text=self.examples.SIGNED_REVOCATION_LIST)

        # Token validation is answered dynamically by token_response()
        # so valid and invalid subject tokens share one endpoint.
        self.requests.get('%s/v3/auth/tokens' % BASE_URI,
                          text=self.token_response)

        # Expected WSGI environment: the v2 baseline plus v3 additions.
        self.token_expected_env = dict(EXPECTED_V2_DEFAULT_ENV_RESPONSE)
        self.token_expected_env.update(EXPECTED_V3_DEFAULT_ENV_ADDITIONS)
        self.service_token_expected_env = dict(
            EXPECTED_V2_DEFAULT_SERVICE_ENV_RESPONSE)
        self.service_token_expected_env.update(
            EXPECTED_V3_DEFAULT_SERVICE_ENV_ADDITIONS)
        self.set_middleware()

    def token_response(self, request, context):
        """requests-mock callback that validates a subject token.

        Returns the canned JSON body for known tokens, a 404 for
        unknown ones, and raises a connection error for ERROR_TOKEN.
        """
        auth_id = request.headers.get('X-Auth-Token')
        token_id = request.headers.get('X-Subject-Token')
        # The middleware must authenticate itself with the admin token.
        self.assertEqual(auth_id, FAKE_ADMIN_TOKEN_ID)

        status = 200
        response = ""

        if token_id == ERROR_TOKEN:
            raise exceptions.ConnectionError("Network connection error.")

        try:
            response = self.examples.JSON_TOKEN_RESPONSES[token_id]
        except KeyError:
            status = 404

        context.status_code = status
        return response
+
+
class OtherTests(BaseAuthTokenMiddlewareTest):
    """Version-negotiation behaviours of the auth_token middleware."""

    def setUp(self):
        super(OtherTests, self).setUp()
        self.logger = self.useFixture(fixtures.FakeLogger())
        self.cfg = self.useFixture(cfg_fixture.Config())

    def test_unknown_server_versions(self):
        # The server only advertises an unrecognised 'v4' API, so the
        # middleware cannot negotiate a version and answers 503.
        discovery = fixture.DiscoveryList(v2=False, v3_id='v4', href=BASE_URI)
        self.set_middleware()

        self.requests.get(BASE_URI, json=discovery, status_code=300)

        request = webob.Request.blank('/')
        request.headers['X-Auth-Token'] = uuid.uuid4().hex
        self.middleware(request.environ, self.start_fake_response)

        self.assertEqual(503, self.response_status)
        # The supported versions are named in the logged error.
        self.assertIn('versions [v3.0, v2.0]', self.logger.output)

    def _assert_auth_version(self, conf_version, identity_server_version):
        # Build a middleware with the given auth_version setting and
        # check which version its identity server actually speaks.
        self.set_middleware(conf={'auth_version': conf_version})
        server = self.middleware._create_identity_server()
        self.assertEqual(identity_server_version, server.auth_version)

    def test_micro_version(self):
        # Micro/patch versions collapse to the supported major.minor.
        for conf_version in ('v2', 'v2.0'):
            self._assert_auth_version(conf_version, (2, 0))
        for conf_version in ('v3', 'v3.0', 'v3.1', 'v3.2', 'v3.9',
                             'v3.3.1', 'v3.3.5'):
            self._assert_auth_version(conf_version, (3, 0))

    def test_default_auth_version(self):
        # With no auth_version configured, the newest advertised API wins.
        # VERSION_LIST_v3 contains both v2 and v3 version elements.
        self.requests.get(BASE_URI, json=VERSION_LIST_v3, status_code=300)
        self._assert_auth_version(None, (3, 0))

        # VERSION_LIST_v2 contains only v2 version elements.
        self.requests.get(BASE_URI, json=VERSION_LIST_v2, status_code=300)
        self._assert_auth_version(None, (2, 0))

    def test_unsupported_auth_version(self):
        # If the requested version isn't supported we will use v2.
        for conf_version in ('v1', 'v10'):
            self._assert_auth_version(conf_version, (2, 0))
+
+
class AuthProtocolLoadingTests(BaseAuthTokenMiddlewareTest):
    """Tests for loading auth plugins and their options into AuthProtocol."""

    AUTH_URL = 'http://auth.url/prefix'
    DISC_URL = 'http://disc.url/prefix'
    KEYSTONE_BASE_URL = 'http://keystone.url/prefix'
    CRUD_URL = 'http://crud.url/prefix'

    # NOTE(jamielennox): use the /v2.0 prefix here because this is what's most
    # likely to be in the service catalog and we should be able to ignore it.
    KEYSTONE_URL = KEYSTONE_BASE_URL + '/v2.0'

    def setUp(self):
        super(AuthProtocolLoadingTests, self).setUp()
        self.cfg = self.useFixture(cfg_fixture.Config())

        self.project_id = uuid.uuid4().hex

        # first touch is to discover the available versions at the auth_url
        self.requests.get(self.AUTH_URL,
                          json=fixture.DiscoveryList(href=self.DISC_URL),
                          status_code=300)

        # then we do discovery on the URL from the service catalog. In practice
        # this is mostly the same URL as before but test the full range.
        self.requests.get(self.KEYSTONE_BASE_URL + '/',
                          json=fixture.DiscoveryList(href=self.CRUD_URL),
                          status_code=300)

    def good_request(self, app):
        """Drive *app* through one fully successful token validation.

        Returns the WSGI response body iterable; asserts the response
        status was 200.
        """
        # admin_token is the token that the service will get back from auth
        admin_token_id = uuid.uuid4().hex
        admin_token = fixture.V3Token(project_id=self.project_id)
        s = admin_token.add_service('identity', name='keystone')
        s.add_standard_endpoints(admin=self.KEYSTONE_URL)

        self.requests.post(self.DISC_URL + '/v3/auth/tokens',
                           json=admin_token,
                           headers={'X-Subject-Token': admin_token_id})

        # user_token is the data from the user's inputted token
        user_token_id = uuid.uuid4().hex
        user_token = fixture.V3Token()
        user_token.set_project_scope()

        request_headers = {'X-Subject-Token': user_token_id,
                           'X-Auth-Token': admin_token_id}

        self.requests.get(self.CRUD_URL + '/v3/auth/tokens',
                          request_headers=request_headers,
                          json=user_token)

        req = webob.Request.blank('/')
        req.headers['X-Auth-Token'] = user_token_id
        resp = app(req.environ, self.start_fake_response)

        self.assertEqual(200, self.response_status)
        return resp

    def test_loading_password_plugin(self):
        """Password plugin options set via oslo.config drive authentication."""
        # the password options aren't set on config until loading time, but we
        # need them set so we can override the values for testing, so force it
        opts = auth.get_plugin_options('password')
        self.cfg.register_opts(opts, group=_base.AUTHTOKEN_GROUP)

        project_id = uuid.uuid4().hex

        # configure the authentication options
        self.cfg.config(auth_plugin='password',
                        username='testuser',
                        password='testpass',
                        auth_url=self.AUTH_URL,
                        project_id=project_id,
                        user_domain_id='userdomainid',
                        group=_base.AUTHTOKEN_GROUP)

        body = uuid.uuid4().hex
        app = auth_token.AuthProtocol(new_app('200 OK', body)(), {})

        resp = self.good_request(app)
        self.assertEqual(six.b(body), resp[0])

    @staticmethod
    def get_plugin(app):
        # Reach through the middleware internals to the loaded auth plugin.
        return app._identity_server._adapter.auth

    def test_invalid_plugin_fails_to_intialize(self):
        """An unknown auth_plugin name fails at construction time."""
        self.cfg.config(auth_plugin=uuid.uuid4().hex,
                        group=_base.AUTHTOKEN_GROUP)

        self.assertRaises(
            exceptions.NoMatchingPlugin,
            lambda: auth_token.AuthProtocol(new_app('200 OK', '')(), {}))

    def test_plugin_loading_mixed_opts(self):
        """Plugin options can be split between oslo.config and paste conf."""
        # some options via override and some via conf
        opts = auth.get_plugin_options('password')
        self.cfg.register_opts(opts, group=_base.AUTHTOKEN_GROUP)

        username = 'testuser'
        password = 'testpass'

        # configure the authentication options
        self.cfg.config(auth_plugin='password',
                        password=password,
                        project_id=self.project_id,
                        user_domain_id='userdomainid',
                        group=_base.AUTHTOKEN_GROUP)

        conf = {'username': username, 'auth_url': self.AUTH_URL}

        body = uuid.uuid4().hex
        app = auth_token.AuthProtocol(new_app('200 OK', body)(), conf)

        resp = self.good_request(app)
        self.assertEqual(six.b(body), resp[0])

        plugin = self.get_plugin(app)

        # Options from both sources must have reached the plugin.
        self.assertEqual(self.AUTH_URL, plugin.auth_url)
        self.assertEqual(username, plugin._username)
        self.assertEqual(password, plugin._password)
        self.assertEqual(self.project_id, plugin._project_id)

    def test_plugin_loading_with_auth_section(self):
        """auth_section redirects plugin option lookup to another group."""
        # some options via override and some via conf
        section = 'testsection'
        username = 'testuser'
        password = 'testpass'

        auth.register_conf_options(self.cfg.conf, group=section)
        opts = auth.get_plugin_options('password')
        self.cfg.register_opts(opts, group=section)

        # configure the authentication options
        self.cfg.config(auth_section=section, group=_base.AUTHTOKEN_GROUP)
        self.cfg.config(auth_plugin='password',
                        password=password,
                        project_id=self.project_id,
                        user_domain_id='userdomainid',
                        group=section)

        conf = {'username': username, 'auth_url': self.AUTH_URL}

        body = uuid.uuid4().hex
        app = auth_token.AuthProtocol(new_app('200 OK', body)(), conf)

        resp = self.good_request(app)
        self.assertEqual(six.b(body), resp[0])

        plugin = self.get_plugin(app)

        # Options from the alternate section must have reached the plugin.
        self.assertEqual(self.AUTH_URL, plugin.auth_url)
        self.assertEqual(username, plugin._username)
        self.assertEqual(password, plugin._password)
        self.assertEqual(self.project_id, plugin._project_id)
+
+
def load_tests(loader, tests, pattern):
    """load_tests protocol hook: let testresources share expensive fixtures.

    OptimisingTestSuite sets up the example token data once for the
    whole module instead of once per test.
    """
    return testresources.OptimisingTestSuite(tests)
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py
new file mode 100644
index 00000000..074d1e5d
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py
@@ -0,0 +1,118 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+
+import mock
+from six.moves import queue
+import testtools
+from testtools import matchers
+
+from keystonemiddleware.auth_token import _memcache_pool
+from keystonemiddleware.tests.unit import utils
+
+
class _TestConnectionPool(_memcache_pool.ConnectionPool):
    """ConnectionPool whose connections are MagicMocks.

    Destroying a connection calls the mock with ``destroyed_value`` so
    tests can assert destruction happened.
    """

    destroyed_value = 'destroyed'

    def _create_connection(self):
        # Each "connection" is just a fresh mock object.
        return mock.MagicMock()

    def _destroy_connection(self, conn):
        # Record destruction by invoking the mock connection.
        conn(self.destroyed_value)
+
+
class TestConnectionPool(utils.TestCase):
    """Behavioural tests for the memcache ConnectionPool."""

    def setUp(self):
        super(TestConnectionPool, self).setUp()
        self.unused_timeout = 10
        self.maxsize = 2
        self.connection_pool = _TestConnectionPool(
            maxsize=self.maxsize,
            unused_timeout=self.unused_timeout)

    def test_get_context_manager(self):
        # acquire() hands out a connection and counts it as acquired;
        # on exit the connection goes back onto the internal queue.
        self.assertThat(self.connection_pool.queue, matchers.HasLength(0))
        with self.connection_pool.acquire() as conn:
            self.assertEqual(1, self.connection_pool._acquired)
        self.assertEqual(0, self.connection_pool._acquired)
        self.assertThat(self.connection_pool.queue, matchers.HasLength(1))
        self.assertEqual(conn, self.connection_pool.queue[0].connection)

    def test_cleanup_pool(self):
        # Expired idle connections are destroyed on the next pool use;
        # unexpired ones survive untouched.
        self.test_get_context_manager()
        newtime = time.time() + self.unused_timeout * 2
        non_expired_connection = _memcache_pool._PoolItem(
            ttl=(newtime * 2),
            connection=mock.MagicMock())
        self.connection_pool.queue.append(non_expired_connection)
        self.assertThat(self.connection_pool.queue, matchers.HasLength(2))
        # Fake the clock so the first queued connection looks expired.
        with mock.patch.object(time, 'time', return_value=newtime):
            conn = self.connection_pool.queue[0].connection
            with self.connection_pool.acquire():
                pass
            conn.assert_has_calls(
                [mock.call(self.connection_pool.destroyed_value)])
        self.assertThat(self.connection_pool.queue, matchers.HasLength(1))
        self.assertEqual(0, non_expired_connection.connection.call_count)

    def test_acquire_conn_exception_returns_acquired_count(self):
        # If creating a connection fails, the acquired counter must not
        # stay incremented (no leak).
        class TestException(Exception):
            pass

        with mock.patch.object(_TestConnectionPool, '_create_connection',
                               side_effect=TestException):
            with testtools.ExpectedException(TestException):
                with self.connection_pool.acquire():
                    pass
            self.assertThat(self.connection_pool.queue,
                            matchers.HasLength(0))
            self.assertEqual(0, self.connection_pool._acquired)

    def test_connection_pool_limits_maximum_connections(self):
        # NOTE(morganfainberg): To ensure we don't lockup tests until the
        # job limit, explicitly call .get_nowait() and .put_nowait() in this
        # case.
        conn1 = self.connection_pool.get_nowait()
        conn2 = self.connection_pool.get_nowait()

        # Use a nowait version to raise an Empty exception indicating we would
        # not get another connection until one is placed back into the queue.
        self.assertRaises(queue.Empty, self.connection_pool.get_nowait)

        # Place the connections back into the pool.
        self.connection_pool.put_nowait(conn1)
        self.connection_pool.put_nowait(conn2)

        # Make sure we can get a connection out of the pool again.
        self.connection_pool.get_nowait()

    def test_connection_pool_maximum_connection_get_timeout(self):
        # With conn_get_timeout=0, an exhausted pool raises immediately
        # instead of blocking for a connection.
        connection_pool = _TestConnectionPool(
            maxsize=1,
            unused_timeout=self.unused_timeout,
            conn_get_timeout=0)

        def _acquire_connection():
            with connection_pool.acquire():
                pass

        # Make sure we've consumed the only available connection from the pool
        conn = connection_pool.get_nowait()

        self.assertRaises(_memcache_pool.ConnectionGetTimeoutException,
                          _acquire_connection)

        # Put the connection back and ensure we can acquire the connection
        # after it is available.
        connection_pool.put_nowait(conn)
        _acquire_connection()
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_memcache_crypt.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_memcache_crypt.py
new file mode 100644
index 00000000..75c7f759
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_memcache_crypt.py
@@ -0,0 +1,97 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+import testtools
+
+from keystonemiddleware.auth_token import _memcache_crypt as memcache_crypt
+
+
class MemcacheCryptPositiveTests(testtools.TestCase):
    """Happy-path tests for the memcache token-cache crypto helpers."""

    def _setup_keys(self, strategy):
        # Derive the ENCRYPTION/CACHE_KEY/MAC key set for a strategy.
        return memcache_crypt.derive_keys(b'token', b'secret', strategy)

    def test_constant_time_compare(self):
        # Only the boolean result is checked; the "constant time"
        # property itself isn't something a unit test can verify.
        ctc = memcache_crypt.constant_time_compare
        equal = [('abcd', 'abcd'), ('', '')]
        unequal = [('abcd', 'efgh'), ('abc', 'abcd'),
                   ('abc', 'abc\x00'), ('', 'abc')]
        for left, right in equal:
            self.assertTrue(ctc(left, right))
        for left, right in unequal:
            self.assertFalse(ctc(left, right))

        # For Python 3, exercise the bytes code path as well.
        if six.PY3:
            for left, right in equal:
                self.assertTrue(ctc(left.encode(), right.encode()))
            for left, right in unequal:
                self.assertFalse(ctc(left.encode(), right.encode()))

    def test_derive_keys(self):
        keys = self._setup_keys(b'strategy')
        # All derived keys share one length but differ in content.
        self.assertEqual(len(keys['ENCRYPTION']), len(keys['CACHE_KEY']))
        self.assertEqual(len(keys['CACHE_KEY']), len(keys['MAC']))
        self.assertNotEqual(keys['ENCRYPTION'], keys['MAC'])
        # The strategy itself is recorded in the key dict.
        self.assertIn('strategy', keys.keys())

    def test_key_strategy_diff(self):
        # Different strategies must derive different key sets.
        self.assertNotEqual(self._setup_keys(b'MAC'),
                            self._setup_keys(b'ENCRYPT'))

    def test_sign_data(self):
        keys = self._setup_keys(b'MAC')
        signature = memcache_crypt.sign_data(keys['MAC'], b'data')
        self.assertEqual(len(signature), memcache_crypt.DIGEST_LENGTH_B64)

    def test_encryption(self):
        keys = self._setup_keys(b'ENCRYPT')
        # what you put in is what you get out
        payloads = [b'data', b'1234567890123456', b'\x00\xFF' * 13]
        payloads += [six.int2byte(i % 256) * i for i in range(768)]
        for data in payloads:
            encrypted = memcache_crypt.encrypt_data(keys['ENCRYPTION'], data)
            self.assertEqual(
                data,
                memcache_crypt.decrypt_data(keys['ENCRYPTION'], encrypted))
            # A truncated ciphertext must fail to decrypt.
            self.assertRaises(memcache_crypt.DecryptError,
                              memcache_crypt.decrypt_data,
                              keys['ENCRYPTION'], encrypted[:-1])

    def test_protect_wrappers(self):
        data = b'My Pretty Little Data'
        for strategy in (b'MAC', b'ENCRYPT'):
            keys = self._setup_keys(strategy)
            protected = memcache_crypt.protect_data(keys, data)
            self.assertNotEqual(protected, data)
            if strategy == b'ENCRYPT':
                # Ciphertext must not contain the plaintext.
                self.assertNotIn(data, protected)
            self.assertEqual(data,
                             memcache_crypt.unprotect_data(keys, protected))
            # Corrupting the payload must trip the integrity check.
            self.assertRaises(memcache_crypt.InvalidMacError,
                              memcache_crypt.unprotect_data,
                              keys, protected[:-1])
            # None passes through untouched.
            self.assertIsNone(memcache_crypt.unprotect_data(keys, None))

    def test_no_pycrypt(self):
        # Simulate pycrypto being unavailable by blanking out AES.
        saved_aes = memcache_crypt.AES
        memcache_crypt.AES = None
        # NOTE(review): encrypt_data is called with three positional
        # args here, matching the original test — confirm against the
        # _memcache_crypt.encrypt_data signature.
        self.assertRaises(memcache_crypt.CryptoUnavailableError,
                          memcache_crypt.encrypt_data, 'token', 'secret',
                          'data')
        memcache_crypt.AES = saved_aes
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_revocations.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_revocations.py
new file mode 100644
index 00000000..d144bb6c
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_revocations.py
@@ -0,0 +1,65 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import json
+import shutil
+import uuid
+
+import mock
+import testtools
+
+from keystonemiddleware.auth_token import _exceptions as exc
+from keystonemiddleware.auth_token import _revocations
+from keystonemiddleware.auth_token import _signing_dir
+
+
class RevocationsTests(testtools.TestCase):
    """Tests for the auth_token revocation-list checking."""

    def _check_with_list(self, revoked_list, token_ids):
        """Run Revocations.check() against a stubbed revocation list.

        :param revoked_list: token ids the identity server reports as
            revoked.
        :param token_ids: token ids to check for revocation.
        :raises exc.InvalidToken: if any checked token is revoked.
        """
        # Local import: only this helper needs it.
        import tempfile

        # mkdtemp() creates a unique mode-0700 directory atomically and
        # honors TMPDIR, unlike the previous predictable (and therefore
        # race/symlink-prone) '/tmp/<uuid>' pattern.
        directory_name = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, directory_name)
        signing_directory = _signing_dir.SigningDirectory(directory_name)

        identity_server = mock.Mock()

        # Stub the CMS verifier to hand back the canned revocation
        # document instead of doing real signature verification.
        verify_result_obj = {
            'revoked': list({'id': r} for r in revoked_list)
        }
        cms_verify = mock.Mock(return_value=json.dumps(verify_result_obj))

        revocations = _revocations.Revocations(
            timeout=datetime.timedelta(1), signing_directory=signing_directory,
            identity_server=identity_server, cms_verify=cms_verify)

        revocations.check(token_ids)

    def test_check_empty_list(self):
        # When the identity server returns an empty list, a token isn't
        # revoked.

        revoked_tokens = []
        token_ids = [uuid.uuid4().hex]
        # No assert because this would raise
        self._check_with_list(revoked_tokens, token_ids)

    def test_check_revoked(self):
        # When the identity server returns a list with a token in it, that
        # token is revoked.

        token_id = uuid.uuid4().hex
        revoked_tokens = [token_id]
        token_ids = [token_id]
        self.assertRaises(exc.InvalidToken,
                          self._check_with_list, revoked_tokens, token_ids)
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_signing_dir.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_signing_dir.py
new file mode 100644
index 00000000..bef62747
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_signing_dir.py
@@ -0,0 +1,138 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import shutil
+import stat
+import uuid
+
+import testtools
+
+from keystonemiddleware.auth_token import _signing_dir
+
+
class SigningDirectoryTests(testtools.TestCase):
    """Tests for the signing directory that stores PKI files on disk."""

    def test_directory_created_when_doesnt_exist(self):
        # When _SigningDirectory is created, if the directory doesn't exist
        # it's created with the expected permissions.
        tmp_name = uuid.uuid4().hex
        parent_directory = '/tmp/%s' % tmp_name
        directory_name = '/tmp/%s/%s' % ((tmp_name,) * 2)

        # Directories are created by __init__.
        _signing_dir.SigningDirectory(directory_name)
        self.addCleanup(shutil.rmtree, parent_directory)

        self.assertTrue(os.path.isdir(directory_name))
        self.assertTrue(os.access(directory_name, os.W_OK))
        self.assertEqual(os.stat(directory_name).st_uid, os.getuid())
        # Mode must be owner-only rwx (0o700).
        self.assertEqual(stat.S_IMODE(os.stat(directory_name).st_mode),
                         stat.S_IRWXU)

    def test_use_directory_already_exists(self):
        # The directory can already exist.

        tmp_name = uuid.uuid4().hex
        parent_directory = '/tmp/%s' % tmp_name
        directory_name = '/tmp/%s/%s' % ((tmp_name,) * 2)
        os.makedirs(directory_name, stat.S_IRWXU)
        self.addCleanup(shutil.rmtree, parent_directory)

        # Must not raise even though the directory pre-exists.
        _signing_dir.SigningDirectory(directory_name)

    def test_write_file(self):
        # write_file when the file doesn't exist creates the file.

        signing_directory = _signing_dir.SigningDirectory()
        self.addCleanup(shutil.rmtree, signing_directory._directory_name)

        file_name = self.getUniqueString()
        contents = self.getUniqueString()
        signing_directory.write_file(file_name, contents)

        file_path = signing_directory.calc_path(file_name)
        with open(file_path) as f:
            actual_contents = f.read()

        self.assertEqual(contents, actual_contents)

    def test_replace_file(self):
        # write_file when the file already exists overwrites it.

        signing_directory = _signing_dir.SigningDirectory()
        self.addCleanup(shutil.rmtree, signing_directory._directory_name)

        file_name = self.getUniqueString()
        orig_contents = self.getUniqueString()
        signing_directory.write_file(file_name, orig_contents)

        new_contents = self.getUniqueString()
        signing_directory.write_file(file_name, new_contents)

        file_path = signing_directory.calc_path(file_name)
        with open(file_path) as f:
            actual_contents = f.read()

        self.assertEqual(new_contents, actual_contents)

    def test_recreate_directory(self):
        # If the original directory is lost, it gets recreated when a file
        # is written.

        signing_directory = _signing_dir.SigningDirectory()
        self.addCleanup(shutil.rmtree, signing_directory._directory_name)

        # Delete the directory.
        shutil.rmtree(signing_directory._directory_name)

        file_name = self.getUniqueString()
        contents = self.getUniqueString()
        signing_directory.write_file(file_name, contents)

        actual_contents = signing_directory.read_file(file_name)
        self.assertEqual(contents, actual_contents)

    def test_read_file(self):
        # Can read a file that was written.

        signing_directory = _signing_dir.SigningDirectory()
        self.addCleanup(shutil.rmtree, signing_directory._directory_name)

        file_name = self.getUniqueString()
        contents = self.getUniqueString()
        signing_directory.write_file(file_name, contents)

        actual_contents = signing_directory.read_file(file_name)

        self.assertEqual(contents, actual_contents)

    def test_read_file_doesnt_exist(self):
        # Show what happens when try to read a file that wasn't written.

        signing_directory = _signing_dir.SigningDirectory()
        self.addCleanup(shutil.rmtree, signing_directory._directory_name)

        file_name = self.getUniqueString()
        self.assertRaises(IOError, signing_directory.read_file, file_name)

    def test_calc_path(self):
        # calc_path returns the actual filename built from the directory name.

        signing_directory = _signing_dir.SigningDirectory()
        self.addCleanup(shutil.rmtree, signing_directory._directory_name)

        file_name = self.getUniqueString()
        actual_path = signing_directory.calc_path(file_name)
        expected_path = os.path.join(signing_directory._directory_name,
                                     file_name)
        self.assertEqual(expected_path, actual_path)
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_utils.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_utils.py
new file mode 100644
index 00000000..fcd1e628
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/auth_token/test_utils.py
@@ -0,0 +1,37 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import testtools
+
+from keystonemiddleware.auth_token import _utils
+
+
class TokenEncodingTest(testtools.TestCase):
    """Tests for token quoting and response-body byte encoding."""

    def test_unquoted_token(self):
        # A raw token gets percent-encoded.
        self.assertEqual('foo%20bar', _utils.safe_quote('foo bar'))

    def test_quoted_token(self):
        # An already-encoded token is left alone (no double encoding).
        self.assertEqual('foo%20bar', _utils.safe_quote('foo%20bar'))

    def test_messages_encoded_as_bytes(self):
        """Test that strings are passed around as bytes for PY3."""
        msg = "This is an error"

        class FakeResp(_utils.MiniResp):
            def __init__(self, error, env):
                super(FakeResp, self).__init__(error, env)

        response = FakeResp(msg, dict(REQUEST_METHOD='GET'))
        # On Py2 .encode() doesn't do much but that's better than an
        # explicit six.PY3 conditional.
        self.assertEqual(msg.encode(), response.body[0])
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/client_fixtures.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/client_fixtures.py
new file mode 100644
index 00000000..ee4111ec
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/client_fixtures.py
@@ -0,0 +1,452 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import fixtures
+from keystoneclient.common import cms
+from keystoneclient import fixture
+from keystoneclient import utils
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+import six
+import testresources
+
+
+TESTDIR = os.path.dirname(os.path.abspath(__file__))
+ROOTDIR = os.path.normpath(os.path.join(TESTDIR, '..', '..', '..'))
+CERTDIR = os.path.join(ROOTDIR, 'examples', 'pki', 'certs')
+CMSDIR = os.path.join(ROOTDIR, 'examples', 'pki', 'cms')
+KEYDIR = os.path.join(ROOTDIR, 'examples', 'pki', 'private')
+
+
+def _hash_signed_token_safe(signed_text, **kwargs):
+ if isinstance(signed_text, six.text_type):
+ signed_text = signed_text.encode('utf-8')
+ return utils.hash_signed_token(signed_text, **kwargs)
+
+
+class Examples(fixtures.Fixture):
+ """Example tokens and certs loaded from the examples directory.
+
+ To use this class correctly, the module needs to override the test suite
+ class to use testresources.OptimisingTestSuite (otherwise the files will
+ be read on every test). This is done by defining a load_tests function
+ in the module, like this:
+
+ def load_tests(loader, tests, pattern):
+ return testresources.OptimisingTestSuite(tests)
+
+ (see http://docs.python.org/2/library/unittest.html#load-tests-protocol )
+
+ """
+
+ def setUp(self):
+ super(Examples, self).setUp()
+
+ # The data for several tests are signed using openssl and are stored in
+ # files in the signing subdirectory. In order to keep the values
+ # consistent between the tests and the signed documents, we read them
+ # in for use in the tests.
+ with open(os.path.join(CMSDIR, 'auth_token_scoped.json')) as f:
+ self.TOKEN_SCOPED_DATA = cms.cms_to_token(f.read())
+
+ with open(os.path.join(CMSDIR, 'auth_token_scoped.pem')) as f:
+ self.SIGNED_TOKEN_SCOPED = cms.cms_to_token(f.read())
+ self.SIGNED_TOKEN_SCOPED_HASH = _hash_signed_token_safe(
+ self.SIGNED_TOKEN_SCOPED)
+ self.SIGNED_TOKEN_SCOPED_HASH_SHA256 = _hash_signed_token_safe(
+ self.SIGNED_TOKEN_SCOPED, mode='sha256')
+ with open(os.path.join(CMSDIR, 'auth_token_unscoped.pem')) as f:
+ self.SIGNED_TOKEN_UNSCOPED = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_v3_token_scoped.pem')) as f:
+ self.SIGNED_v3_TOKEN_SCOPED = cms.cms_to_token(f.read())
+ self.SIGNED_v3_TOKEN_SCOPED_HASH = _hash_signed_token_safe(
+ self.SIGNED_v3_TOKEN_SCOPED)
+ self.SIGNED_v3_TOKEN_SCOPED_HASH_SHA256 = _hash_signed_token_safe(
+ self.SIGNED_v3_TOKEN_SCOPED, mode='sha256')
+ with open(os.path.join(CMSDIR, 'auth_token_revoked.pem')) as f:
+ self.REVOKED_TOKEN = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_token_scoped_expired.pem')) as f:
+ self.SIGNED_TOKEN_SCOPED_EXPIRED = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_v3_token_revoked.pem')) as f:
+ self.REVOKED_v3_TOKEN = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_token_scoped.pkiz')) as f:
+ self.SIGNED_TOKEN_SCOPED_PKIZ = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_token_unscoped.pkiz')) as f:
+ self.SIGNED_TOKEN_UNSCOPED_PKIZ = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_v3_token_scoped.pkiz')) as f:
+ self.SIGNED_v3_TOKEN_SCOPED_PKIZ = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_token_revoked.pkiz')) as f:
+ self.REVOKED_TOKEN_PKIZ = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR,
+ 'auth_token_scoped_expired.pkiz')) as f:
+ self.SIGNED_TOKEN_SCOPED_EXPIRED_PKIZ = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'auth_v3_token_revoked.pkiz')) as f:
+ self.REVOKED_v3_TOKEN_PKIZ = cms.cms_to_token(f.read())
+ with open(os.path.join(CMSDIR, 'revocation_list.json')) as f:
+ self.REVOCATION_LIST = jsonutils.loads(f.read())
+ with open(os.path.join(CMSDIR, 'revocation_list.pem')) as f:
+ self.SIGNED_REVOCATION_LIST = jsonutils.dumps({'signed': f.read()})
+
+ self.SIGNING_CERT_FILE = os.path.join(CERTDIR, 'signing_cert.pem')
+ with open(self.SIGNING_CERT_FILE) as f:
+ self.SIGNING_CERT = f.read()
+
+ self.KERBEROS_BIND = 'USER@REALM'
+
+ self.SIGNING_KEY_FILE = os.path.join(KEYDIR, 'signing_key.pem')
+ with open(self.SIGNING_KEY_FILE) as f:
+ self.SIGNING_KEY = f.read()
+
+ self.SIGNING_CA_FILE = os.path.join(CERTDIR, 'cacert.pem')
+ with open(self.SIGNING_CA_FILE) as f:
+ self.SIGNING_CA = f.read()
+
+ self.UUID_TOKEN_DEFAULT = "ec6c0710ec2f471498484c1b53ab4f9d"
+ self.UUID_TOKEN_NO_SERVICE_CATALOG = '8286720fbe4941e69fa8241723bb02df'
+ self.UUID_TOKEN_UNSCOPED = '731f903721c14827be7b2dc912af7776'
+ self.UUID_TOKEN_BIND = '3fc54048ad64405c98225ce0897af7c5'
+ self.UUID_TOKEN_UNKNOWN_BIND = '8885fdf4d42e4fb9879e6379fa1eaf48'
+ self.VALID_DIABLO_TOKEN = 'b0cf19b55dbb4f20a6ee18e6c6cf1726'
+ self.v3_UUID_TOKEN_DEFAULT = '5603457654b346fdbb93437bfe76f2f1'
+ self.v3_UUID_TOKEN_UNSCOPED = 'd34835fdaec447e695a0a024d84f8d79'
+ self.v3_UUID_TOKEN_DOMAIN_SCOPED = 'e8a7b63aaa4449f38f0c5c05c3581792'
+ self.v3_UUID_TOKEN_BIND = '2f61f73e1c854cbb9534c487f9bd63c2'
+ self.v3_UUID_TOKEN_UNKNOWN_BIND = '7ed9781b62cd4880b8d8c6788ab1d1e2'
+
+ self.UUID_SERVICE_TOKEN_DEFAULT = 'fe4c0710ec2f492748596c1b53ab124'
+ self.v3_UUID_SERVICE_TOKEN_DEFAULT = 'g431071bbc2f492748596c1b53cb229'
+
+ revoked_token = self.REVOKED_TOKEN
+ if isinstance(revoked_token, six.text_type):
+ revoked_token = revoked_token.encode('utf-8')
+ self.REVOKED_TOKEN_HASH = utils.hash_signed_token(revoked_token)
+ self.REVOKED_TOKEN_HASH_SHA256 = utils.hash_signed_token(revoked_token,
+ mode='sha256')
+ self.REVOKED_TOKEN_LIST = (
+ {'revoked': [{'id': self.REVOKED_TOKEN_HASH,
+ 'expires': timeutils.utcnow()}]})
+ self.REVOKED_TOKEN_LIST_JSON = jsonutils.dumps(self.REVOKED_TOKEN_LIST)
+
+ revoked_v3_token = self.REVOKED_v3_TOKEN
+ if isinstance(revoked_v3_token, six.text_type):
+ revoked_v3_token = revoked_v3_token.encode('utf-8')
+ self.REVOKED_v3_TOKEN_HASH = utils.hash_signed_token(revoked_v3_token)
+ hash = utils.hash_signed_token(revoked_v3_token, mode='sha256')
+ self.REVOKED_v3_TOKEN_HASH_SHA256 = hash
+ self.REVOKED_v3_TOKEN_LIST = (
+ {'revoked': [{'id': self.REVOKED_v3_TOKEN_HASH,
+ 'expires': timeutils.utcnow()}]})
+ self.REVOKED_v3_TOKEN_LIST_JSON = jsonutils.dumps(
+ self.REVOKED_v3_TOKEN_LIST)
+
+ revoked_token_pkiz = self.REVOKED_TOKEN_PKIZ
+ if isinstance(revoked_token_pkiz, six.text_type):
+ revoked_token_pkiz = revoked_token_pkiz.encode('utf-8')
+ self.REVOKED_TOKEN_PKIZ_HASH = utils.hash_signed_token(
+ revoked_token_pkiz)
+ revoked_v3_token_pkiz = self.REVOKED_v3_TOKEN_PKIZ
+ if isinstance(revoked_v3_token_pkiz, six.text_type):
+ revoked_v3_token_pkiz = revoked_v3_token_pkiz.encode('utf-8')
+ self.REVOKED_v3_PKIZ_TOKEN_HASH = utils.hash_signed_token(
+ revoked_v3_token_pkiz)
+
+ self.REVOKED_TOKEN_PKIZ_LIST = (
+ {'revoked': [{'id': self.REVOKED_TOKEN_PKIZ_HASH,
+ 'expires': timeutils.utcnow()},
+ {'id': self.REVOKED_v3_PKIZ_TOKEN_HASH,
+ 'expires': timeutils.utcnow()},
+ ]})
+ self.REVOKED_TOKEN_PKIZ_LIST_JSON = jsonutils.dumps(
+ self.REVOKED_TOKEN_PKIZ_LIST)
+
+ self.SIGNED_TOKEN_SCOPED_KEY = cms.cms_hash_token(
+ self.SIGNED_TOKEN_SCOPED)
+ self.SIGNED_TOKEN_UNSCOPED_KEY = cms.cms_hash_token(
+ self.SIGNED_TOKEN_UNSCOPED)
+ self.SIGNED_v3_TOKEN_SCOPED_KEY = cms.cms_hash_token(
+ self.SIGNED_v3_TOKEN_SCOPED)
+
+ self.SIGNED_TOKEN_SCOPED_PKIZ_KEY = cms.cms_hash_token(
+ self.SIGNED_TOKEN_SCOPED_PKIZ)
+ self.SIGNED_TOKEN_UNSCOPED_PKIZ_KEY = cms.cms_hash_token(
+ self.SIGNED_TOKEN_UNSCOPED_PKIZ)
+ self.SIGNED_v3_TOKEN_SCOPED_PKIZ_KEY = cms.cms_hash_token(
+ self.SIGNED_v3_TOKEN_SCOPED_PKIZ)
+
+ self.INVALID_SIGNED_TOKEN = (
+ "MIIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"
+ "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC"
+ "DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD"
+ "EEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE"
+ "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "1111111111111111111111111111111111111111111111111111111111111111"
+ "2222222222222222222222222222222222222222222222222222222222222222"
+ "3333333333333333333333333333333333333333333333333333333333333333"
+ "4444444444444444444444444444444444444444444444444444444444444444"
+ "5555555555555555555555555555555555555555555555555555555555555555"
+ "6666666666666666666666666666666666666666666666666666666666666666"
+ "7777777777777777777777777777777777777777777777777777777777777777"
+ "8888888888888888888888888888888888888888888888888888888888888888"
+ "9999999999999999999999999999999999999999999999999999999999999999"
+ "0000000000000000000000000000000000000000000000000000000000000000")
+
+ self.INVALID_SIGNED_PKIZ_TOKEN = (
+ "PKIZ_AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"
+ "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC"
+ "DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD"
+ "EEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE"
+ "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "1111111111111111111111111111111111111111111111111111111111111111"
+ "2222222222222222222222222222222222222222222222222222222222222222"
+ "3333333333333333333333333333333333333333333333333333333333333333"
+ "4444444444444444444444444444444444444444444444444444444444444444"
+ "5555555555555555555555555555555555555555555555555555555555555555"
+ "6666666666666666666666666666666666666666666666666666666666666666"
+ "7777777777777777777777777777777777777777777777777777777777777777"
+ "8888888888888888888888888888888888888888888888888888888888888888"
+ "9999999999999999999999999999999999999999999999999999999999999999"
+ "0000000000000000000000000000000000000000000000000000000000000000")
+
+ # JSON responses keyed by token ID
+ self.TOKEN_RESPONSES = {}
+
+ # basic values
+ PROJECT_ID = 'tenant_id1'
+ PROJECT_NAME = 'tenant_name1'
+ USER_ID = 'user_id1'
+ USER_NAME = 'user_name1'
+ DOMAIN_ID = 'domain_id1'
+ DOMAIN_NAME = 'domain_name1'
+ ROLE_NAME1 = 'role1'
+ ROLE_NAME2 = 'role2'
+
+ SERVICE_PROJECT_ID = 'service_project_id1'
+ SERVICE_PROJECT_NAME = 'service_project_name1'
+ SERVICE_USER_ID = 'service_user_id1'
+ SERVICE_USER_NAME = 'service_user_name1'
+ SERVICE_DOMAIN_ID = 'service_domain_id1'
+ SERVICE_DOMAIN_NAME = 'service_domain_name1'
+ SERVICE_ROLE_NAME1 = 'service_role1'
+ SERVICE_ROLE_NAME2 = 'service_role2'
+
+ self.SERVICE_TYPE = 'identity'
+ self.UNVERSIONED_SERVICE_URL = 'http://keystone.server:5000/'
+ self.SERVICE_URL = self.UNVERSIONED_SERVICE_URL + 'v2.0'
+
+ # Old Tokens
+
+ self.TOKEN_RESPONSES[self.VALID_DIABLO_TOKEN] = {
+ 'access': {
+ 'token': {
+ 'id': self.VALID_DIABLO_TOKEN,
+ 'expires': '2020-01-01T00:00:10.000123Z',
+ 'tenantId': PROJECT_ID,
+ },
+ 'user': {
+ 'id': USER_ID,
+ 'name': USER_NAME,
+ 'roles': [
+ {'name': ROLE_NAME1},
+ {'name': ROLE_NAME2},
+ ],
+ },
+ },
+ }
+
+ # Generated V2 Tokens
+
+ token = fixture.V2Token(token_id=self.UUID_TOKEN_DEFAULT,
+ tenant_id=PROJECT_ID,
+ tenant_name=PROJECT_NAME,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ token.add_role(name=ROLE_NAME1)
+ token.add_role(name=ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint(public=self.SERVICE_URL)
+ self.TOKEN_RESPONSES[self.UUID_TOKEN_DEFAULT] = token
+
+ token = fixture.V2Token(token_id=self.UUID_TOKEN_UNSCOPED,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ self.TOKEN_RESPONSES[self.UUID_TOKEN_UNSCOPED] = token
+
+ token = fixture.V2Token(token_id='valid-token',
+ tenant_id=PROJECT_ID,
+ tenant_name=PROJECT_NAME,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ token.add_role(ROLE_NAME1)
+ token.add_role(ROLE_NAME2)
+ self.TOKEN_RESPONSES[self.UUID_TOKEN_NO_SERVICE_CATALOG] = token
+
+ token = fixture.V2Token(token_id=self.SIGNED_TOKEN_SCOPED_KEY,
+ tenant_id=PROJECT_ID,
+ tenant_name=PROJECT_NAME,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ token.add_role(ROLE_NAME1)
+ token.add_role(ROLE_NAME2)
+ self.TOKEN_RESPONSES[self.SIGNED_TOKEN_SCOPED_KEY] = token
+
+ token = fixture.V2Token(token_id=self.SIGNED_TOKEN_UNSCOPED_KEY,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ self.TOKEN_RESPONSES[self.SIGNED_TOKEN_UNSCOPED_KEY] = token
+
+ token = fixture.V2Token(token_id=self.UUID_TOKEN_BIND,
+ tenant_id=PROJECT_ID,
+ tenant_name=PROJECT_NAME,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ token.add_role(ROLE_NAME1)
+ token.add_role(ROLE_NAME2)
+ token['access']['token']['bind'] = {'kerberos': self.KERBEROS_BIND}
+ self.TOKEN_RESPONSES[self.UUID_TOKEN_BIND] = token
+
+ token = fixture.V2Token(token_id=self.UUID_TOKEN_UNKNOWN_BIND,
+ tenant_id=PROJECT_ID,
+ tenant_name=PROJECT_NAME,
+ user_id=USER_ID,
+ user_name=USER_NAME)
+ token.add_role(ROLE_NAME1)
+ token.add_role(ROLE_NAME2)
+ token['access']['token']['bind'] = {'FOO': 'BAR'}
+ self.TOKEN_RESPONSES[self.UUID_TOKEN_UNKNOWN_BIND] = token
+
+ token = fixture.V2Token(token_id=self.UUID_SERVICE_TOKEN_DEFAULT,
+ tenant_id=SERVICE_PROJECT_ID,
+ tenant_name=SERVICE_PROJECT_NAME,
+ user_id=SERVICE_USER_ID,
+ user_name=SERVICE_USER_NAME)
+ token.add_role(name=SERVICE_ROLE_NAME1)
+ token.add_role(name=SERVICE_ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint(public=self.SERVICE_URL)
+ self.TOKEN_RESPONSES[self.UUID_SERVICE_TOKEN_DEFAULT] = token
+
+ # Generated V3 Tokens
+
+ token = fixture.V3Token(user_id=USER_ID,
+ user_name=USER_NAME,
+ user_domain_id=DOMAIN_ID,
+ user_domain_name=DOMAIN_NAME,
+ project_id=PROJECT_ID,
+ project_name=PROJECT_NAME,
+ project_domain_id=DOMAIN_ID,
+ project_domain_name=DOMAIN_NAME)
+ token.add_role(id=ROLE_NAME1, name=ROLE_NAME1)
+ token.add_role(id=ROLE_NAME2, name=ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint('public', self.SERVICE_URL)
+ self.TOKEN_RESPONSES[self.v3_UUID_TOKEN_DEFAULT] = token
+
+ token = fixture.V3Token(user_id=USER_ID,
+ user_name=USER_NAME,
+ user_domain_id=DOMAIN_ID,
+ user_domain_name=DOMAIN_NAME)
+ self.TOKEN_RESPONSES[self.v3_UUID_TOKEN_UNSCOPED] = token
+
+ token = fixture.V3Token(user_id=USER_ID,
+ user_name=USER_NAME,
+ user_domain_id=DOMAIN_ID,
+ user_domain_name=DOMAIN_NAME,
+ domain_id=DOMAIN_ID,
+ domain_name=DOMAIN_NAME)
+ token.add_role(id=ROLE_NAME1, name=ROLE_NAME1)
+ token.add_role(id=ROLE_NAME2, name=ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint('public', self.SERVICE_URL)
+ self.TOKEN_RESPONSES[self.v3_UUID_TOKEN_DOMAIN_SCOPED] = token
+
+ token = fixture.V3Token(user_id=USER_ID,
+ user_name=USER_NAME,
+ user_domain_id=DOMAIN_ID,
+ user_domain_name=DOMAIN_NAME,
+ project_id=PROJECT_ID,
+ project_name=PROJECT_NAME,
+ project_domain_id=DOMAIN_ID,
+ project_domain_name=DOMAIN_NAME)
+ token.add_role(name=ROLE_NAME1)
+ token.add_role(name=ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint('public', self.SERVICE_URL)
+ self.TOKEN_RESPONSES[self.SIGNED_v3_TOKEN_SCOPED_KEY] = token
+
+ token = fixture.V3Token(user_id=USER_ID,
+ user_name=USER_NAME,
+ user_domain_id=DOMAIN_ID,
+ user_domain_name=DOMAIN_NAME,
+ project_id=PROJECT_ID,
+ project_name=PROJECT_NAME,
+ project_domain_id=DOMAIN_ID,
+ project_domain_name=DOMAIN_NAME)
+ token.add_role(name=ROLE_NAME1)
+ token.add_role(name=ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint('public', self.SERVICE_URL)
+ token['token']['bind'] = {'kerberos': self.KERBEROS_BIND}
+ self.TOKEN_RESPONSES[self.v3_UUID_TOKEN_BIND] = token
+
+ token = fixture.V3Token(user_id=USER_ID,
+ user_name=USER_NAME,
+ user_domain_id=DOMAIN_ID,
+ user_domain_name=DOMAIN_NAME,
+ project_id=PROJECT_ID,
+ project_name=PROJECT_NAME,
+ project_domain_id=DOMAIN_ID,
+ project_domain_name=DOMAIN_NAME)
+ token.add_role(name=ROLE_NAME1)
+ token.add_role(name=ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint('public', self.SERVICE_URL)
+ token['token']['bind'] = {'FOO': 'BAR'}
+ self.TOKEN_RESPONSES[self.v3_UUID_TOKEN_UNKNOWN_BIND] = token
+
+ token = fixture.V3Token(user_id=SERVICE_USER_ID,
+ user_name=SERVICE_USER_NAME,
+ user_domain_id=SERVICE_DOMAIN_ID,
+ user_domain_name=SERVICE_DOMAIN_NAME,
+ project_id=SERVICE_PROJECT_ID,
+ project_name=SERVICE_PROJECT_NAME,
+ project_domain_id=SERVICE_DOMAIN_ID,
+ project_domain_name=SERVICE_DOMAIN_NAME)
+ token.add_role(id=SERVICE_ROLE_NAME1,
+ name=SERVICE_ROLE_NAME1)
+ token.add_role(id=SERVICE_ROLE_NAME2,
+ name=SERVICE_ROLE_NAME2)
+ svc = token.add_service(self.SERVICE_TYPE)
+ svc.add_endpoint('public', self.SERVICE_URL)
+ self.TOKEN_RESPONSES[self.v3_UUID_SERVICE_TOKEN_DEFAULT] = token
+
+ # PKIZ tokens generally link to above tokens
+
+ self.TOKEN_RESPONSES[self.SIGNED_TOKEN_SCOPED_PKIZ_KEY] = (
+ self.TOKEN_RESPONSES[self.SIGNED_TOKEN_SCOPED_KEY])
+ self.TOKEN_RESPONSES[self.SIGNED_TOKEN_UNSCOPED_PKIZ_KEY] = (
+ self.TOKEN_RESPONSES[self.SIGNED_TOKEN_UNSCOPED_KEY])
+ self.TOKEN_RESPONSES[self.SIGNED_v3_TOKEN_SCOPED_PKIZ_KEY] = (
+ self.TOKEN_RESPONSES[self.SIGNED_v3_TOKEN_SCOPED_KEY])
+
+ self.JSON_TOKEN_RESPONSES = dict([(k, jsonutils.dumps(v)) for k, v in
+ six.iteritems(self.TOKEN_RESPONSES)])
+
+
+EXAMPLES_RESOURCE = testresources.FixtureResource(Examples())
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_audit_middleware.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_audit_middleware.py
new file mode 100644
index 00000000..89e5aa44
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_audit_middleware.py
@@ -0,0 +1,485 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import tempfile
+import uuid
+
+import mock
+from oslo_config import cfg
+from pycadf import identifier
+import testtools
+from testtools import matchers
+import webob
+
+from keystonemiddleware import audit
+
+
+class FakeApp(object):
+ def __call__(self, env, start_response):
+ body = 'Some response'
+ start_response('200 OK', [
+ ('Content-Type', 'text/plain'),
+ ('Content-Length', str(sum(map(len, body))))
+ ])
+ return [body]
+
+
+class FakeFailingApp(object):
+ def __call__(self, env, start_response):
+ raise Exception('It happens!')
+
+
+class BaseAuditMiddlewareTest(testtools.TestCase):
+ def setUp(self):
+ super(BaseAuditMiddlewareTest, self).setUp()
+ self.fd, self.audit_map = tempfile.mkstemp()
+
+ with open(self.audit_map, "w") as f:
+ f.write("[custom_actions]\n")
+ f.write("reboot = start/reboot\n")
+ f.write("os-migrations/get = read\n\n")
+ f.write("[path_keywords]\n")
+ f.write("action = None\n")
+ f.write("os-hosts = host\n")
+ f.write("os-migrations = None\n")
+ f.write("reboot = None\n")
+ f.write("servers = server\n\n")
+ f.write("[service_endpoints]\n")
+ f.write("compute = service/compute")
+
+ cfg.CONF([], project='keystonemiddleware')
+
+ self.middleware = audit.AuditMiddleware(
+ FakeApp(), audit_map_file=self.audit_map,
+ service_name='pycadf')
+
+ self.addCleanup(lambda: os.close(self.fd))
+ self.addCleanup(cfg.CONF.reset)
+
+ @staticmethod
+ def get_environ_header(req_type):
+ env_headers = {'HTTP_X_SERVICE_CATALOG':
+ '''[{"endpoints_links": [],
+ "endpoints": [{"adminURL":
+ "http://admin_host:8774",
+ "region": "RegionOne",
+ "publicURL":
+ "http://public_host:8774",
+ "internalURL":
+ "http://internal_host:8774",
+ "id": "resource_id"}],
+ "type": "compute",
+ "name": "nova"},]''',
+ 'HTTP_X_USER_ID': 'user_id',
+ 'HTTP_X_USER_NAME': 'user_name',
+ 'HTTP_X_AUTH_TOKEN': 'token',
+ 'HTTP_X_PROJECT_ID': 'tenant_id',
+ 'HTTP_X_IDENTITY_STATUS': 'Confirmed'}
+ env_headers['REQUEST_METHOD'] = req_type
+ return env_headers
+
+
+@mock.patch('oslo.messaging.get_transport', mock.MagicMock())
+class AuditMiddlewareTest(BaseAuditMiddlewareTest):
+
+ def test_api_request(self):
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info') as notify:
+ self.middleware(req)
+ # Check first notification with only 'request'
+ call_args = notify.call_args_list[0][0]
+ self.assertEqual('audit.http.request', call_args[1])
+ self.assertEqual('/foo/bar', call_args[2]['requestPath'])
+ self.assertEqual('pending', call_args[2]['outcome'])
+ self.assertNotIn('reason', call_args[2])
+ self.assertNotIn('reporterchain', call_args[2])
+
+ # Check second notification with request + response
+ call_args = notify.call_args_list[1][0]
+ self.assertEqual('audit.http.response', call_args[1])
+ self.assertEqual('/foo/bar', call_args[2]['requestPath'])
+ self.assertEqual('success', call_args[2]['outcome'])
+ self.assertIn('reason', call_args[2])
+ self.assertIn('reporterchain', call_args[2])
+
+ def test_api_request_failure(self):
+ self.middleware = audit.AuditMiddleware(
+ FakeFailingApp(),
+ audit_map_file=self.audit_map,
+ service_name='pycadf')
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info') as notify:
+ try:
+ self.middleware(req)
+ self.fail('Application exception has not been re-raised')
+ except Exception:
+ pass
+ # Check first notification with only 'request'
+ call_args = notify.call_args_list[0][0]
+ self.assertEqual('audit.http.request', call_args[1])
+ self.assertEqual('/foo/bar', call_args[2]['requestPath'])
+ self.assertEqual('pending', call_args[2]['outcome'])
+ self.assertNotIn('reporterchain', call_args[2])
+
+ # Check second notification with request + response
+ call_args = notify.call_args_list[1][0]
+ self.assertEqual('audit.http.response', call_args[1])
+ self.assertEqual('/foo/bar', call_args[2]['requestPath'])
+ self.assertEqual('unknown', call_args[2]['outcome'])
+ self.assertIn('reporterchain', call_args[2])
+
+ def test_process_request_fail(self):
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info',
+ side_effect=Exception('error')) as notify:
+ self.middleware._process_request(req)
+ self.assertTrue(notify.called)
+
+ def test_process_response_fail(self):
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info',
+ side_effect=Exception('error')) as notify:
+ self.middleware._process_response(req, webob.response.Response())
+ self.assertTrue(notify.called)
+
+ def test_ignore_req_opt(self):
+ self.middleware = audit.AuditMiddleware(FakeApp(),
+ audit_map_file=self.audit_map,
+ ignore_req_list='get, PUT')
+ req = webob.Request.blank('/skip/foo',
+ environ=self.get_environ_header('GET'))
+ req1 = webob.Request.blank('/skip/foo',
+ environ=self.get_environ_header('PUT'))
+ req2 = webob.Request.blank('/accept/foo',
+ environ=self.get_environ_header('POST'))
+ with mock.patch('oslo.messaging.Notifier.info') as notify:
+ # Check GET/PUT request does not send notification
+ self.middleware(req)
+ self.middleware(req1)
+ self.assertEqual([], notify.call_args_list)
+
+ # Check non-GET/PUT request does send notification
+ self.middleware(req2)
+ self.assertThat(notify.call_args_list, matchers.HasLength(2))
+ call_args = notify.call_args_list[0][0]
+ self.assertEqual('audit.http.request', call_args[1])
+ self.assertEqual('/accept/foo', call_args[2]['requestPath'])
+
+ call_args = notify.call_args_list[1][0]
+ self.assertEqual('audit.http.response', call_args[1])
+ self.assertEqual('/accept/foo', call_args[2]['requestPath'])
+
+ def test_api_request_no_messaging(self):
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('keystonemiddleware.audit.messaging', None):
+ with mock.patch('keystonemiddleware.audit._LOG.info') as log:
+ self.middleware(req)
+ # Check first notification with only 'request'
+ call_args = log.call_args_list[0][0]
+ self.assertEqual('audit.http.request',
+ call_args[1]['event_type'])
+
+ # Check second notification with request + response
+ call_args = log.call_args_list[1][0]
+ self.assertEqual('audit.http.response',
+ call_args[1]['event_type'])
+
+ def test_cadf_event_scoped_to_request(self):
+ middleware = audit.AuditMiddleware(
+ FakeApp(),
+ audit_map_file=self.audit_map,
+ service_name='pycadf')
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info') as notify:
+ middleware(req)
+ self.assertIsNotNone(req.environ.get('cadf_event'))
+
+ # ensure exact same event is used between request and response
+ self.assertEqual(notify.call_args_list[0][0][2]['id'],
+ notify.call_args_list[1][0][2]['id'])
+
+ def test_cadf_event_scoped_to_request_on_error(self):
+ middleware = audit.AuditMiddleware(
+ FakeApp(),
+ audit_map_file=self.audit_map,
+ service_name='pycadf')
+ req = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info',
+ side_effect=Exception('error')) as notify:
+ middleware._process_request(req)
+ self.assertTrue(notify.called)
+ req2 = webob.Request.blank('/foo/bar',
+ environ=self.get_environ_header('GET'))
+ with mock.patch('oslo.messaging.Notifier.info') as notify:
+ middleware._process_response(req2, webob.response.Response())
+ self.assertTrue(notify.called)
+ # ensure event is not the same across requests
+ self.assertNotEqual(req.environ['cadf_event'].id,
+ notify.call_args_list[0][0][2]['id'])
+
+
+@mock.patch('oslo.messaging', mock.MagicMock())
+class AuditApiLogicTest(BaseAuditMiddlewareTest):
+
+ def api_request(self, method, url):
+ req = webob.Request.blank(url, environ=self.get_environ_header(method),
+ remote_addr='192.168.0.1')
+ self.middleware._process_request(req)
+ return req
+
+ def test_get_list(self):
+ req = self.api_request('GET', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['action'], 'read/list')
+ self.assertEqual(payload['typeURI'],
+ 'http://schemas.dmtf.org/cloud/audit/1.0/event')
+ self.assertEqual(payload['outcome'], 'pending')
+ self.assertEqual(payload['eventType'], 'activity')
+ self.assertEqual(payload['target']['name'], 'nova')
+ self.assertEqual(payload['target']['id'], 'openstack:resource_id')
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers')
+ self.assertEqual(len(payload['target']['addresses']), 3)
+ self.assertEqual(payload['target']['addresses'][0]['name'], 'admin')
+ self.assertEqual(payload['target']['addresses'][0]['url'],
+ 'http://admin_host:8774')
+ self.assertEqual(payload['initiator']['id'], 'openstack:user_id')
+ self.assertEqual(payload['initiator']['name'], 'user_name')
+ self.assertEqual(payload['initiator']['project_id'],
+ 'openstack:tenant_id')
+ self.assertEqual(payload['initiator']['host']['address'],
+ '192.168.0.1')
+ self.assertEqual(payload['initiator']['typeURI'],
+ 'service/security/account/user')
+ self.assertNotEqual(payload['initiator']['credential']['token'],
+ 'token')
+ self.assertEqual(payload['initiator']['credential']['identity_status'],
+ 'Confirmed')
+ self.assertNotIn('reason', payload)
+ self.assertNotIn('reporterchain', payload)
+ self.assertEqual(payload['observer']['id'], 'target')
+ self.assertEqual(req.path, payload['requestPath'])
+
+ def test_get_read(self):
+ req = self.api_request('GET', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers/'
+ + str(uuid.uuid4()))
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers/server')
+ self.assertEqual(payload['action'], 'read')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_get_unknown_endpoint(self):
+ req = self.api_request('GET', 'http://unknown:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['action'], 'read/list')
+ self.assertEqual(payload['outcome'], 'pending')
+ self.assertEqual(payload['target']['name'], 'unknown')
+ self.assertEqual(payload['target']['id'], 'unknown')
+ self.assertEqual(payload['target']['typeURI'], 'unknown')
+
+ def test_get_unknown_endpoint_default_set(self):
+ with open(self.audit_map, "w") as f:
+ f.write("[DEFAULT]\n")
+ f.write("target_endpoint_type = compute\n")
+ f.write("[path_keywords]\n")
+ f.write("servers = server\n\n")
+ f.write("[service_endpoints]\n")
+ f.write("compute = service/compute")
+
+ self.middleware = audit.AuditMiddleware(
+ FakeApp(), audit_map_file=self.audit_map,
+ service_name='pycadf')
+
+ req = self.api_request('GET', 'http://unknown:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['action'], 'read/list')
+ self.assertEqual(payload['outcome'], 'pending')
+ self.assertEqual(payload['target']['name'], 'nova')
+ self.assertEqual(payload['target']['id'], 'openstack:resource_id')
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers')
+
+ def test_put(self):
+ req = self.api_request('PUT', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers')
+ self.assertEqual(payload['action'], 'update')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_delete(self):
+ req = self.api_request('DELETE', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers')
+ self.assertEqual(payload['action'], 'delete')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_head(self):
+ req = self.api_request('HEAD', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers')
+ self.assertEqual(payload['action'], 'read')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_post_update(self):
+ req = self.api_request('POST',
+ 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers/'
+ + str(uuid.uuid4()))
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers/server')
+ self.assertEqual(payload['action'], 'update')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_post_create(self):
+ req = self.api_request('POST', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers')
+ self.assertEqual(payload['action'], 'create')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_post_action(self):
+ req = webob.Request.blank('http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers/action',
+ environ=self.get_environ_header('POST'))
+ req.body = b'{"createImage" : {"name" : "new-image","metadata": ' \
+ b'{"ImageType": "Gold","ImageVersion": "2.0"}}}'
+ self.middleware._process_request(req)
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers/action')
+ self.assertEqual(payload['action'], 'update/createImage')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_post_empty_body_action(self):
+ req = self.api_request('POST', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers/action')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/servers/action')
+ self.assertEqual(payload['action'], 'create')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_custom_action(self):
+ req = self.api_request('GET', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/os-hosts/'
+ + str(uuid.uuid4()) + '/reboot')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/os-hosts/host/reboot')
+ self.assertEqual(payload['action'], 'start/reboot')
+ self.assertEqual(payload['outcome'], 'pending')
+
+ def test_custom_action_complex(self):
+ req = self.api_request('GET', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/os-migrations')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/os-migrations')
+ self.assertEqual(payload['action'], 'read')
+ req = self.api_request('POST', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/os-migrations')
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['typeURI'],
+ 'service/compute/os-migrations')
+ self.assertEqual(payload['action'], 'create')
+
+ def test_response_mod_msg(self):
+ req = self.api_request('GET', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.middleware._process_response(req, webob.Response())
+ payload2 = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['id'], payload2['id'])
+ self.assertEqual(payload['tags'], payload2['tags'])
+ self.assertEqual(payload2['outcome'], 'success')
+ self.assertEqual(payload2['reason']['reasonType'], 'HTTP')
+ self.assertEqual(payload2['reason']['reasonCode'], '200')
+ self.assertEqual(len(payload2['reporterchain']), 1)
+ self.assertEqual(payload2['reporterchain'][0]['role'], 'modifier')
+ self.assertEqual(payload2['reporterchain'][0]['reporter']['id'],
+ 'target')
+
+ def test_no_response(self):
+ req = self.api_request('GET', 'http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers')
+ payload = req.environ['cadf_event'].as_dict()
+ self.middleware._process_response(req, None)
+ payload2 = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['id'], payload2['id'])
+ self.assertEqual(payload['tags'], payload2['tags'])
+ self.assertEqual(payload2['outcome'], 'unknown')
+ self.assertNotIn('reason', payload2)
+ self.assertEqual(len(payload2['reporterchain']), 1)
+ self.assertEqual(payload2['reporterchain'][0]['role'], 'modifier')
+ self.assertEqual(payload2['reporterchain'][0]['reporter']['id'],
+ 'target')
+
+ def test_missing_req(self):
+ req = webob.Request.blank('http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers',
+ environ=self.get_environ_header('GET'))
+ self.assertNotIn('cadf_event', req.environ)
+ self.middleware._process_response(req, webob.Response())
+ self.assertIn('cadf_event', req.environ)
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['outcome'], 'success')
+ self.assertEqual(payload['reason']['reasonType'], 'HTTP')
+ self.assertEqual(payload['reason']['reasonCode'], '200')
+ self.assertEqual(payload['observer']['id'], 'target')
+
+ def test_missing_catalog_endpoint_id(self):
+ env_headers = {'HTTP_X_SERVICE_CATALOG':
+ '''[{"endpoints_links": [],
+ "endpoints": [{"adminURL":
+ "http://admin_host:8774",
+ "region": "RegionOne",
+ "publicURL":
+ "http://public_host:8774",
+ "internalURL":
+ "http://internal_host:8774"}],
+ "type": "compute",
+ "name": "nova"},]''',
+ 'HTTP_X_USER_ID': 'user_id',
+ 'HTTP_X_USER_NAME': 'user_name',
+ 'HTTP_X_AUTH_TOKEN': 'token',
+ 'HTTP_X_PROJECT_ID': 'tenant_id',
+ 'HTTP_X_IDENTITY_STATUS': 'Confirmed',
+ 'REQUEST_METHOD': 'GET'}
+ req = webob.Request.blank('http://admin_host:8774/v2/'
+ + str(uuid.uuid4()) + '/servers',
+ environ=env_headers)
+ self.middleware._process_request(req)
+ payload = req.environ['cadf_event'].as_dict()
+ self.assertEqual(payload['target']['id'], identifier.norm_ns('nova'))
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_opts.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_opts.py
new file mode 100644
index 00000000..93e1b06e
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_opts.py
@@ -0,0 +1,85 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pkg_resources
+from testtools import matchers
+
+from keystonemiddleware import opts
+from keystonemiddleware.tests.unit import utils
+
+
+class OptsTestCase(utils.TestCase):
+
+ def _test_list_auth_token_opts(self, result):
+ self.assertThat(result, matchers.HasLength(1))
+
+ for group in (g for (g, _l) in result):
+ self.assertEqual('keystone_authtoken', group)
+
+ expected_opt_names = [
+ 'auth_admin_prefix',
+ 'auth_host',
+ 'auth_port',
+ 'auth_protocol',
+ 'auth_uri',
+ 'identity_uri',
+ 'auth_version',
+ 'delay_auth_decision',
+ 'http_connect_timeout',
+ 'http_request_max_retries',
+ 'admin_token',
+ 'admin_user',
+ 'admin_password',
+ 'admin_tenant_name',
+ 'cache',
+ 'certfile',
+ 'keyfile',
+ 'cafile',
+ 'insecure',
+ 'signing_dir',
+ 'memcached_servers',
+ 'token_cache_time',
+ 'revocation_cache_time',
+ 'memcache_security_strategy',
+ 'memcache_secret_key',
+ 'memcache_use_advanced_pool',
+ 'memcache_pool_dead_retry',
+ 'memcache_pool_maxsize',
+ 'memcache_pool_unused_timeout',
+ 'memcache_pool_conn_get_timeout',
+ 'memcache_pool_socket_timeout',
+ 'include_service_catalog',
+ 'enforce_token_bind',
+ 'check_revocations_for_cached',
+ 'hash_algorithms'
+ ]
+ opt_names = [o.name for (g, l) in result for o in l]
+ self.assertThat(opt_names, matchers.HasLength(len(expected_opt_names)))
+
+ for opt in opt_names:
+ self.assertIn(opt, expected_opt_names)
+
+ def test_list_auth_token_opts(self):
+ self._test_list_auth_token_opts(opts.list_auth_token_opts())
+
+ def test_entry_point(self):
+ result = None
+ for ep in pkg_resources.iter_entry_points('oslo.config.opts'):
+ if ep.name == 'keystonemiddleware.auth_token':
+ list_fn = ep.load()
+ result = list_fn()
+ break
+
+ self.assertIsNotNone(result)
+ self._test_list_auth_token_opts(result)
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_s3_token_middleware.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_s3_token_middleware.py
new file mode 100644
index 00000000..2bcdf894
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/test_s3_token_middleware.py
@@ -0,0 +1,235 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from oslo_serialization import jsonutils
+import requests
+from requests_mock.contrib import fixture as rm_fixture
+import six
+import testtools
+import webob
+
+from keystonemiddleware import s3_token
+from keystonemiddleware.tests.unit import utils
+
+
+GOOD_RESPONSE = {'access': {'token': {'id': 'TOKEN_ID',
+ 'tenant': {'id': 'TENANT_ID'}}}}
+
+
+class FakeApp(object):
+ """This represents a WSGI app protected by the auth_token middleware."""
+ def __call__(self, env, start_response):
+ resp = webob.Response()
+ resp.environ = env
+ return resp(env, start_response)
+
+
+class S3TokenMiddlewareTestBase(utils.TestCase):
+
+ TEST_PROTOCOL = 'https'
+ TEST_HOST = 'fakehost'
+ TEST_PORT = 35357
+ TEST_URL = '%s://%s:%d/v2.0/s3tokens' % (TEST_PROTOCOL,
+ TEST_HOST,
+ TEST_PORT)
+
+ def setUp(self):
+ super(S3TokenMiddlewareTestBase, self).setUp()
+
+ self.conf = {
+ 'auth_host': self.TEST_HOST,
+ 'auth_port': self.TEST_PORT,
+ 'auth_protocol': self.TEST_PROTOCOL,
+ }
+
+ self.requests = self.useFixture(rm_fixture.Fixture())
+
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+
+
+class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
+
+ def setUp(self):
+ super(S3TokenMiddlewareTestGood, self).setUp()
+ self.middleware = s3_token.S3Token(FakeApp(), self.conf)
+
+ self.requests.post(self.TEST_URL, status_code=201, json=GOOD_RESPONSE)
+
+ # Ignore the request and pass to the next middleware in the
+ # pipeline if no path has been specified.
+ def test_no_path_request(self):
+ req = webob.Request.blank('/')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ # Ignore the request and pass to the next middleware in the
+ # pipeline if no Authorization header has been specified
+ def test_without_authorization(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ def test_without_auth_storage_token(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'badboy'
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ def test_authorized(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+ self.assertTrue(req.path.startswith('/v1/AUTH_TENANT_ID'))
+ self.assertEqual(req.headers['X-Auth-Token'], 'TOKEN_ID')
+
+ def test_authorized_http(self):
+ self.requests.post(self.TEST_URL.replace('https', 'http'),
+ status_code=201,
+ json=GOOD_RESPONSE)
+
+ self.middleware = (
+ s3_token.filter_factory({'auth_protocol': 'http',
+ 'auth_host': self.TEST_HOST,
+ 'auth_port': self.TEST_PORT})(FakeApp()))
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+ self.assertTrue(req.path.startswith('/v1/AUTH_TENANT_ID'))
+ self.assertEqual(req.headers['X-Auth-Token'], 'TOKEN_ID')
+
+ def test_authorization_nova_toconnect(self):
+ req = webob.Request.blank('/v1/AUTH_swiftint/c/o')
+ req.headers['Authorization'] = 'access:FORCED_TENANT_ID:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+ path = req.environ['PATH_INFO']
+ self.assertTrue(path.startswith('/v1/AUTH_FORCED_TENANT_ID'))
+
+ @mock.patch.object(requests, 'post')
+ def test_insecure(self, MOCK_REQUEST):
+ self.middleware = (
+ s3_token.filter_factory({'insecure': True})(FakeApp()))
+
+ text_return_value = jsonutils.dumps(GOOD_RESPONSE)
+ if six.PY3:
+ text_return_value = text_return_value.encode()
+ MOCK_REQUEST.return_value = utils.TestResponse({
+ 'status_code': 201,
+ 'text': text_return_value})
+
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+
+ self.assertTrue(MOCK_REQUEST.called)
+ mock_args, mock_kwargs = MOCK_REQUEST.call_args
+ self.assertIs(mock_kwargs['verify'], False)
+
+
+class S3TokenMiddlewareTestBad(S3TokenMiddlewareTestBase):
+ def setUp(self):
+ super(S3TokenMiddlewareTestBad, self).setUp()
+ self.middleware = s3_token.S3Token(FakeApp(), self.conf)
+
+ def test_unauthorized_token(self):
+ ret = {"error":
+ {"message": "EC2 access key not found.",
+ "code": 401,
+ "title": "Unauthorized"}}
+ self.requests.post(self.TEST_URL, status_code=403, json=ret)
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ resp = req.get_response(self.middleware)
+ s3_denied_req = self.middleware._deny_request('AccessDenied')
+ self.assertEqual(resp.body, s3_denied_req.body)
+ self.assertEqual(resp.status_int, s3_denied_req.status_int)
+
+ def test_bogus_authorization(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'badboy'
+ req.headers['X-Storage-Token'] = 'token'
+ resp = req.get_response(self.middleware)
+ self.assertEqual(resp.status_int, 400)
+ s3_invalid_req = self.middleware._deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_req.body)
+ self.assertEqual(resp.status_int, s3_invalid_req.status_int)
+
+ def test_fail_to_connect_to_keystone(self):
+ with mock.patch.object(self.middleware, '_json_request') as o:
+ s3_invalid_req = self.middleware._deny_request('InvalidURI')
+ o.side_effect = s3_token.ServiceError(s3_invalid_req)
+
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ resp = req.get_response(self.middleware)
+ self.assertEqual(resp.body, s3_invalid_req.body)
+ self.assertEqual(resp.status_int, s3_invalid_req.status_int)
+
+ def test_bad_reply(self):
+ self.requests.post(self.TEST_URL, status_code=201, text="<badreply>")
+
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ resp = req.get_response(self.middleware)
+ s3_invalid_req = self.middleware._deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_req.body)
+ self.assertEqual(resp.status_int, s3_invalid_req.status_int)
+
+
+class S3TokenMiddlewareTestUtil(testtools.TestCase):
+ def test_split_path_failed(self):
+ self.assertRaises(ValueError, s3_token._split_path, '')
+ self.assertRaises(ValueError, s3_token._split_path, '/')
+ self.assertRaises(ValueError, s3_token._split_path, '//')
+ self.assertRaises(ValueError, s3_token._split_path, '//a')
+ self.assertRaises(ValueError, s3_token._split_path, '/a/c')
+ self.assertRaises(ValueError, s3_token._split_path, '//c')
+ self.assertRaises(ValueError, s3_token._split_path, '/a/c/')
+ self.assertRaises(ValueError, s3_token._split_path, '/a//')
+ self.assertRaises(ValueError, s3_token._split_path, '/a', 2)
+ self.assertRaises(ValueError, s3_token._split_path, '/a', 2, 3)
+ self.assertRaises(ValueError, s3_token._split_path, '/a', 2, 3, True)
+ self.assertRaises(ValueError, s3_token._split_path, '/a/c/o/r', 3, 3)
+ self.assertRaises(ValueError, s3_token._split_path, '/a', 5, 4)
+
+ def test_split_path_success(self):
+ self.assertEqual(s3_token._split_path('/a'), ['a'])
+ self.assertEqual(s3_token._split_path('/a/'), ['a'])
+ self.assertEqual(s3_token._split_path('/a/c', 2), ['a', 'c'])
+ self.assertEqual(s3_token._split_path('/a/c/o', 3), ['a', 'c', 'o'])
+ self.assertEqual(s3_token._split_path('/a/c/o/r', 3, 3, True),
+ ['a', 'c', 'o/r'])
+ self.assertEqual(s3_token._split_path('/a/c', 2, 3, True),
+ ['a', 'c', None])
+ self.assertEqual(s3_token._split_path('/a/c/', 2), ['a', 'c'])
+ self.assertEqual(s3_token._split_path('/a/c/', 2, 3), ['a', 'c', ''])
+
+ def test_split_path_invalid_path(self):
+ try:
+ s3_token._split_path('o\nn e', 2)
+ except ValueError as err:
+ self.assertEqual(str(err), 'Invalid path: o%0An%20e')
+ try:
+ s3_token._split_path('o\nn e', 2, 3, True)
+ except ValueError as err:
+ self.assertEqual(str(err), 'Invalid path: o%0An%20e')
diff --git a/keystonemiddleware-moon/keystonemiddleware/tests/unit/utils.py b/keystonemiddleware-moon/keystonemiddleware/tests/unit/utils.py
new file mode 100644
index 00000000..da6f347a
--- /dev/null
+++ b/keystonemiddleware-moon/keystonemiddleware/tests/unit/utils.py
@@ -0,0 +1,138 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import sys
+import time
+
+import fixtures
+import mock
+import requests
+import testtools
+import uuid
+
+
+class TestCase(testtools.TestCase):
+ TEST_DOMAIN_ID = '1'
+ TEST_DOMAIN_NAME = 'aDomain'
+ TEST_GROUP_ID = uuid.uuid4().hex
+ TEST_ROLE_ID = uuid.uuid4().hex
+ TEST_TENANT_ID = '1'
+ TEST_TENANT_NAME = 'aTenant'
+ TEST_TOKEN = 'aToken'
+ TEST_TRUST_ID = 'aTrust'
+ TEST_USER = 'test'
+ TEST_USER_ID = uuid.uuid4().hex
+
+ TEST_ROOT_URL = 'http://127.0.0.1:5000/'
+
+ def setUp(self):
+ super(TestCase, self).setUp()
+ self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+ self.time_patcher = mock.patch.object(time, 'time', lambda: 1234)
+ self.time_patcher.start()
+
+ def tearDown(self):
+ self.time_patcher.stop()
+ super(TestCase, self).tearDown()
+
+
+if tuple(sys.version_info)[0:2] < (2, 7):
+
+ def assertDictEqual(self, d1, d2, msg=None):
+ # Simple version taken from 2.7
+ self.assertIsInstance(d1, dict,
+ 'First argument is not a dictionary')
+ self.assertIsInstance(d2, dict,
+ 'Second argument is not a dictionary')
+ if d1 != d2:
+ if msg:
+ self.fail(msg)
+ else:
+ standardMsg = '%r != %r' % (d1, d2)
+ self.fail(standardMsg)
+
+ TestCase.assertDictEqual = assertDictEqual
+
+
+class TestResponse(requests.Response):
+ """Class used to wrap requests.Response and provide some
+ convenience to initialize with a dict.
+ """
+
+ def __init__(self, data):
+ self._text = None
+ super(TestResponse, self).__init__()
+ if isinstance(data, dict):
+ self.status_code = data.get('status_code', 200)
+ headers = data.get('headers')
+ if headers:
+ self.headers.update(headers)
+ # Fake the text attribute to streamline Response creation
+ # _content is defined by requests.Response
+ self._content = data.get('text')
+ else:
+ self.status_code = data
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+ @property
+ def text(self):
+ return self.content
+
+
+class DisableModuleFixture(fixtures.Fixture):
+ """A fixture to provide support for unloading/disabling modules."""
+
+ def __init__(self, module, *args, **kw):
+ super(DisableModuleFixture, self).__init__(*args, **kw)
+ self.module = module
+ self._finders = []
+ self._cleared_modules = {}
+
+ def tearDown(self):
+ super(DisableModuleFixture, self).tearDown()
+ for finder in self._finders:
+ sys.meta_path.remove(finder)
+ sys.modules.update(self._cleared_modules)
+
+ def clear_module(self):
+ cleared_modules = {}
+ for fullname in sys.modules.keys():
+ if (fullname == self.module or
+ fullname.startswith(self.module + '.')):
+ cleared_modules[fullname] = sys.modules.pop(fullname)
+ return cleared_modules
+
+ def setUp(self):
+ """Ensure ImportError for the specified module."""
+
+ super(DisableModuleFixture, self).setUp()
+
+ # Clear 'module' references in sys.modules
+ self._cleared_modules.update(self.clear_module())
+
+ finder = NoModuleFinder(self.module)
+ self._finders.append(finder)
+ sys.meta_path.insert(0, finder)
+
+
+class NoModuleFinder(object):
+ """Disallow further imports of 'module'."""
+
+ def __init__(self, module):
+ self.module = module
+
+ def find_module(self, fullname, path):
+ if fullname == self.module or fullname.startswith(self.module + '.'):
+ raise ImportError